Diff of the two buildlogs: -- --- b1/build.log 2024-04-02 12:28:56.838658244 +0000 +++ b2/build.log 2024-04-02 12:33:25.031195098 +0000 @@ -1,6 +1,6 @@ I: pbuilder: network access will be disabled during build -I: Current time: Tue Apr 2 00:18:39 -12 2024 -I: pbuilder-time-stamp: 1712060319 +I: Current time: Tue May 6 08:52:00 +14 2025 +I: pbuilder-time-stamp: 1746471120 I: Building the build Environment I: extracting base tarball [/var/cache/pbuilder/trixie-reproducible-base.tgz] I: copying local configuration @@ -30,54 +30,86 @@ dpkg-source: info: applying 02_parse_h_dependency I: using fakeroot in build. I: Installing the build-deps -I: user script /srv/workspace/pbuilder/37675/tmp/hooks/D02_print_environment starting +I: user script /srv/workspace/pbuilder/40155/tmp/hooks/D01_modify_environment starting +debug: Running on ionos16-i386. +I: Changing host+domainname to test build reproducibility +I: Adding a custom variable just for the fun of it... +I: Changing /bin/sh to bash +'/bin/sh' -> '/bin/bash' +lrwxrwxrwx 1 root root 9 May 5 18:52 /bin/sh -> /bin/bash +I: Setting pbuilder2's login shell to /bin/bash +I: Setting pbuilder2's GECOS to second user,second room,second work-phone,second home-phone,second other +I: user script /srv/workspace/pbuilder/40155/tmp/hooks/D01_modify_environment finished +I: user script /srv/workspace/pbuilder/40155/tmp/hooks/D02_print_environment starting I: set - BUILDDIR='/build/reproducible-path' - BUILDUSERGECOS='first user,first room,first work-phone,first home-phone,first other' - BUILDUSERNAME='pbuilder1' - BUILD_ARCH='i386' - DEBIAN_FRONTEND='noninteractive' - DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=8 ' - DISTRIBUTION='trixie' - HOME='/root' - HOST_ARCH='i386' + BASH=/bin/sh + BASHOPTS=checkwinsize:cmdhist:complete_fullquote:extquote:force_fignore:globasciiranges:globskipdots:hostcomplete:interactive_comments:patsub_replacement:progcomp:promptvars:sourcepath + BASH_ALIASES=() + BASH_ARGC=() + BASH_ARGV=() + BASH_CMDS=() + BASH_LINENO=([0]="12" [1]="0") + BASH_LOADABLES_PATH=/usr/local/lib/bash:/usr/lib/bash:/opt/local/lib/bash:/usr/pkg/lib/bash:/opt/pkg/lib/bash:. 
+ BASH_SOURCE=([0]="/tmp/hooks/D02_print_environment" [1]="/tmp/hooks/D02_print_environment") + BASH_VERSINFO=([0]="5" [1]="2" [2]="21" [3]="1" [4]="release" [5]="i686-pc-linux-gnu") + BASH_VERSION='5.2.21(1)-release' + BUILDDIR=/build/reproducible-path + BUILDUSERGECOS='second user,second room,second work-phone,second home-phone,second other' + BUILDUSERNAME=pbuilder2 + BUILD_ARCH=i386 + DEBIAN_FRONTEND=noninteractive + DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=21 ' + DIRSTACK=() + DISTRIBUTION=trixie + EUID=0 + FUNCNAME=([0]="Echo" [1]="main") + GROUPS=() + HOME=/root + HOSTNAME=i-capture-the-hostname + HOSTTYPE=i686 + HOST_ARCH=i386 IFS=' ' - INVOCATION_ID='dfa3205228af4383a02e8e7e58bc3698' - LANG='C' - LANGUAGE='en_US:en' - LC_ALL='C' - LD_LIBRARY_PATH='/usr/lib/libeatmydata' - LD_PRELOAD='libeatmydata.so' - MAIL='/var/mail/root' - OPTIND='1' - PATH='/usr/sbin:/usr/bin:/sbin:/bin:/usr/games' - PBCURRENTCOMMANDLINEOPERATION='build' - PBUILDER_OPERATION='build' - PBUILDER_PKGDATADIR='/usr/share/pbuilder' - PBUILDER_PKGLIBDIR='/usr/lib/pbuilder' - PBUILDER_SYSCONFDIR='/etc' - PPID='37675' - PS1='# ' - PS2='> ' + INVOCATION_ID=f842e0878d77456abd206e58a5f646c9 + LANG=C + LANGUAGE=de_CH:de + LC_ALL=C + LD_LIBRARY_PATH=/usr/lib/libeatmydata + LD_PRELOAD=libeatmydata.so + MACHTYPE=i686-pc-linux-gnu + MAIL=/var/mail/root + OPTERR=1 + OPTIND=1 + OSTYPE=linux-gnu + PATH=/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path + PBCURRENTCOMMANDLINEOPERATION=build + PBUILDER_OPERATION=build + PBUILDER_PKGDATADIR=/usr/share/pbuilder + PBUILDER_PKGLIBDIR=/usr/lib/pbuilder + PBUILDER_SYSCONFDIR=/etc + PIPESTATUS=([0]="0") + POSIXLY_CORRECT=y + PPID=40155 PS4='+ ' - PWD='/' - SHELL='/bin/bash' - SHLVL='2' - SUDO_COMMAND='/usr/bin/timeout -k 18.1h 18h /usr/bin/ionice -c 3 /usr/bin/nice /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.TicAAURu/pbuilderrc_LjK5 --distribution trixie --hookdir /etc/pbuilder/first-build-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/trixie-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.TicAAURu/b1 --logfile b1/build.log bison_3.8.2+dfsg-1.dsc' - SUDO_GID='112' - SUDO_UID='107' - SUDO_USER='jenkins' - TERM='unknown' - TZ='/usr/share/zoneinfo/Etc/GMT+12' - USER='root' - _='/usr/bin/systemd-run' - http_proxy='http://78.137.99.97:3128' + PWD=/ + SHELL=/bin/bash + SHELLOPTS=braceexpand:errexit:hashall:interactive-comments:posix + SHLVL=3 + SUDO_COMMAND='/usr/bin/timeout -k 24.1h 24h /usr/bin/ionice -c 3 /usr/bin/nice -n 11 /usr/bin/unshare --uts -- /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.TicAAURu/pbuilderrc_vCKd --distribution trixie --hookdir /etc/pbuilder/rebuild-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/trixie-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.TicAAURu/b2 --logfile b2/build.log bison_3.8.2+dfsg-1.dsc' + SUDO_GID=112 + SUDO_UID=107 + SUDO_USER=jenkins + TERM=unknown + TZ=/usr/share/zoneinfo/Etc/GMT-14 + UID=0 + USER=root + _='I: set' + http_proxy=http://85.184.249.68:3128 I: uname -a - Linux ionos2-i386 6.1.0-18-amd64 #1 SMP PREEMPT_DYNAMIC Debian 6.1.76-1 (2024-02-01) x86_64 GNU/Linux + Linux i-capture-the-hostname 6.1.0-18-amd64 #1 SMP PREEMPT_DYNAMIC Debian 6.1.76-1 (2024-02-01) x86_64 GNU/Linux I: ls -l /bin - lrwxrwxrwx 1 root root 7 Apr 1 11:23 /bin -> usr/bin -I: user script /srv/workspace/pbuilder/37675/tmp/hooks/D02_print_environment finished + 
lrwxrwxrwx 1 root root 7 Apr 29 17:46 /bin -> usr/bin +I: user script /srv/workspace/pbuilder/40155/tmp/hooks/D02_print_environment finished -> Attempting to satisfy build-dependencies -> Creating pbuilder-satisfydepends-dummy package Package: pbuilder-satisfydepends-dummy @@ -154,7 +186,7 @@ Get: 31 http://deb.debian.org/debian trixie/main i386 po-debconf all 1.0.21+nmu1 [248 kB] Get: 32 http://deb.debian.org/debian trixie/main i386 debhelper all 13.15.3 [901 kB] Get: 33 http://deb.debian.org/debian trixie/main i386 help2man i386 1.49.3 [198 kB] -Fetched 19.9 MB in 0s (62.3 MB/s) +Fetched 19.9 MB in 0s (92.5 MB/s) debconf: delaying package configuration, since apt-utils is not installed Selecting previously unselected package m4. (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 19876 files and directories currently installed.) @@ -306,7 +338,11 @@ fakeroot is already the newest version (1.33-1). 0 upgraded, 0 newly installed, 0 to remove and 0 not upgraded. I: Building the package -I: Running cd /build/reproducible-path/bison-3.8.2+dfsg/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-genchanges -S > ../bison_3.8.2+dfsg-1_source.changes +I: user script /srv/workspace/pbuilder/40155/tmp/hooks/A99_set_merged_usr starting +Not re-configuring usrmerge for trixie +I: user script /srv/workspace/pbuilder/40155/tmp/hooks/A99_set_merged_usr finished +hostname: Name or service not known +I: Running cd /build/reproducible-path/bison-3.8.2+dfsg/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-genchanges -S > ../bison_3.8.2+dfsg-1_source.changes dpkg-buildpackage: info: source package bison dpkg-buildpackage: info: source version 2:3.8.2+dfsg-1 dpkg-buildpackage: info: source distribution unstable @@ -917,7 +953,7 @@ /build/reproducible-path/bison-3.8.2+dfsg/src/getargs.c make[1]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg' dh_auto_build - make -j8 + make -j21 make[1]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' rm -f examples/c/reccalc/scan.stamp examples/c/reccalc/scan.stamp.tmp rm -f lib/alloca.h-t lib/alloca.h && \ @@ -925,6 +961,7 @@ sed -e 's|@''HAVE_ALLOCA_H''@|1|g' < ./lib/alloca.in.h; \ } > lib/alloca.h-t && \ mv -f lib/alloca.h-t lib/alloca.h +/usr/bin/mkdir -p examples/c/reccalc rm -f lib/configmake.h-t && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ echo '#if HAVE_WINSOCK2_H'; \ @@ -960,7 +997,7 @@ echo '#define PKGLIBEXECDIR "/usr/libexec/bison"'; \ } | sed '/""/d' > lib/configmake.h-t && \ mv -f lib/configmake.h-t lib/configmake.h -/usr/bin/mkdir -p examples/c/reccalc +touch examples/c/reccalc/scan.stamp.tmp rm -f lib/fcntl.h-t lib/fcntl.h && \ { echo '/* DO NOT EDIT! 
GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -987,6 +1024,7 @@ < ./lib/fcntl.in.h; \ } > lib/fcntl.h-t && \ mv lib/fcntl.h-t lib/fcntl.h +flex -oexamples/c/reccalc/scan.c --header=examples/c/reccalc/scan.h ./examples/c/reccalc/scan.l rm -f lib/iconv.h-t lib/iconv.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */' && \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1005,7 +1043,6 @@ < ./lib/iconv.in.h; \ } > lib/iconv.h-t && \ mv lib/iconv.h-t lib/iconv.h -touch examples/c/reccalc/scan.stamp.tmp rm -f lib/inttypes.h-t lib/inttypes.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's/@''HAVE_INTTYPES_H''@/1/g' \ @@ -1051,7 +1088,6 @@ < ./lib/limits.in.h; \ } > lib/limits.h-t && \ mv lib/limits.h-t lib/limits.h -flex -oexamples/c/reccalc/scan.c --header=examples/c/reccalc/scan.h ./examples/c/reccalc/scan.l rm -f lib/locale.h-t lib/locale.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */' && \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1373,6 +1409,7 @@ -e '/definition of _GL_WARN_ON_USE/r ./lib/warn-on-use.h'; \ } > lib/math.h-t && \ mv lib/math.h-t lib/math.h +mv examples/c/reccalc/scan.stamp.tmp examples/c/reccalc/scan.stamp rm -f lib/sched.h-t lib/sched.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1600,7 +1637,6 @@ -e '/definition of _GL_WARN_ON_USE/r ./lib/warn-on-use.h'; \ } > lib/stdio.h-t && \ mv lib/stdio.h-t lib/stdio.h -mv examples/c/reccalc/scan.stamp.tmp examples/c/reccalc/scan.stamp rm -f lib/stdlib.h-t lib/stdlib.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */' && \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1925,6 +1961,7 @@ < ./lib/sys_stat.in.h; \ } > lib/sys/stat.h-t && \ mv lib/sys/stat.h-t lib/sys/stat.h +/usr/bin/mkdir -p lib/sys rm -f lib/sys/time.h-t lib/sys/time.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1945,8 +1982,6 @@ < ./lib/sys_time.in.h; \ } > lib/sys/time.h-t && \ mv lib/sys/time.h-t lib/sys/time.h -/usr/bin/mkdir -p lib/sys -/usr/bin/mkdir -p lib/sys rm -f lib/sys/times.h-t lib/sys/times.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1964,6 +1999,7 @@ } > lib/sys/times.h-t && \ mv lib/sys/times.h-t lib/sys/times.h /usr/bin/mkdir -p lib/sys +/usr/bin/mkdir -p lib/sys rm -f lib/sys/types.h-t lib/sys/types.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1976,19 +2012,6 @@ < ./lib/sys_types.in.h; \ } > lib/sys/types.h-t && \ mv lib/sys/types.h-t lib/sys/types.h -rm -f lib/sys/wait.h-t lib/sys/wait.h && \ -{ echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ - sed -e 's|@''GUARD_PREFIX''@|GL|g' \ - -e 's|@''INCLUDE_NEXT''@|include_next|g' \ - -e 's|@''PRAGMA_SYSTEM_HEADER''@|#pragma GCC system_header|g' \ - -e 's|@''PRAGMA_COLUMNS''@||g' \ - -e 's|@''NEXT_SYS_WAIT_H''@||g' \ - -e 's/@''GNULIB_WAITPID''@/1/g' \ - -e '/definitions of _GL_FUNCDECL_RPL/r ./lib/c++defs.h' \ - -e '/definition of _GL_WARN_ON_USE/r ./lib/warn-on-use.h' \ - < ./lib/sys_wait.in.h; \ -} > lib/sys/wait.h-t && \ -mv lib/sys/wait.h-t lib/sys/wait.h rm -f lib/termios.h-t lib/termios.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -2004,6 +2027,19 @@ < ./lib/termios.in.h; \ } > lib/termios.h-t && \ mv lib/termios.h-t lib/termios.h +rm -f lib/sys/wait.h-t lib/sys/wait.h && \ +{ echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! 
*/'; \ + sed -e 's|@''GUARD_PREFIX''@|GL|g' \ + -e 's|@''INCLUDE_NEXT''@|include_next|g' \ + -e 's|@''PRAGMA_SYSTEM_HEADER''@|#pragma GCC system_header|g' \ + -e 's|@''PRAGMA_COLUMNS''@||g' \ + -e 's|@''NEXT_SYS_WAIT_H''@||g' \ + -e 's/@''GNULIB_WAITPID''@/1/g' \ + -e '/definitions of _GL_FUNCDECL_RPL/r ./lib/c++defs.h' \ + -e '/definition of _GL_WARN_ON_USE/r ./lib/warn-on-use.h' \ + < ./lib/sys_wait.in.h; \ +} > lib/sys/wait.h-t && \ +mv lib/sys/wait.h-t lib/sys/wait.h rm -f lib/time.h-t lib/time.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */' && \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -2423,6 +2459,20 @@ make[3]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg/gnulib-po' Making all in . make[3]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_map.o `test -f 'lib/gl_map.c' || echo './'`lib/gl_map.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-math.o `test -f 'lib/math.c' || echo './'`lib/math.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-mbchar.o `test -f 'lib/mbchar.c' || echo './'`lib/mbchar.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-mbfile.o `test -f 'lib/mbfile.c' || echo './'`lib/mbfile.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-mbswidth.o `test -f 'lib/mbswidth.c' || echo './'`lib/mbswidth.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_oset.o `test -f 'lib/gl_oset.c' || echo './'`lib/gl_oset.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-pipe2.o `test -f 'lib/pipe2.c' || echo './'`lib/pipe2.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-pipe2-safer.o `test -f 'lib/pipe2-safer.c' || echo './'`lib/pipe2-safer.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. 
-I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-printf-frexp.o `test -f 'lib/printf-frexp.c' || echo './'`lib/printf-frexp.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-printf-frexpl.o `test -f 'lib/printf-frexpl.c' || echo './'`lib/printf-frexpl.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-progname.o `test -f 'lib/progname.c' || echo './'`lib/progname.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-quotearg.o `test -f 'lib/quotearg.c' || echo './'`lib/quotearg.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_rbtree_oset.o `test -f 'lib/gl_rbtree_oset.c' || echo './'`lib/gl_rbtree_oset.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_rbtreehash_list.o `test -f 'lib/gl_rbtreehash_list.c' || echo './'`lib/gl_rbtreehash_list.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-setlocale_null.o `test -f 'lib/setlocale_null.c' || echo './'`lib/setlocale_null.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-sig-handler.o `test -f 'lib/sig-handler.c' || echo './'`lib/sig-handler.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-spawn-pipe.o `test -f 'lib/spawn-pipe.c' || echo './'`lib/spawn-pipe.c @@ -2490,6 +2540,8 @@ gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-lalr.o `test -f 'src/lalr.c' || echo './'`src/lalr.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-location.o `test -f 'src/location.c' || echo './'`src/location.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-lr0.o `test -f 'src/lr0.c' || echo './'`src/lr0.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-lssi.o `test -f 'src/lssi.c' || echo './'`src/lssi.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-main.o `test -f 'src/main.c' || echo './'`src/main.c src/lalr.c: In function 'set_goto_map': src/lalr.c:152:26: warning: format '%ld' expects argument of type 'long int', but argument 5 has type 'goto_number' {aka 'unsigned int'} [-Wformat=] 152 | fprintf (stderr, "goto_map[%d (%s)] = %ld .. %ld\n", @@ -2507,8 +2559,6 @@ | ~~~~~~~~~~~~~~~~~ | | | goto_number {aka unsigned int} -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-lssi.o `test -f 'src/lssi.c' || echo './'`src/lssi.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-main.o `test -f 'src/main.c' || echo './'`src/main.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-muscle-tab.o `test -f 'src/muscle-tab.c' || echo './'`src/muscle-tab.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-named-ref.o `test -f 'src/named-ref.c' || echo './'`src/named-ref.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DINSTALLDIR=\"/usr/bin\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o src/bison-nullable.o `test -f 'src/nullable.c' || echo './'`src/nullable.c @@ -2551,13 +2601,6 @@ gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-canonicalize.o `test -f 'lib/canonicalize.c' || echo './'`lib/canonicalize.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-careadlinkat.o `test -f 'lib/careadlinkat.c' || echo './'`lib/careadlinkat.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-cloexec.o `test -f 'lib/cloexec.c' || echo './'`lib/cloexec.c -lib/canonicalize.c: In function 'canonicalize_filename_mode': -lib/canonicalize.c:484:5: warning: #warning "GCC might issue a bogus -Wreturn-local-addr warning here." [-Wcpp] - 484 | #warning "GCC might issue a bogus -Wreturn-local-addr warning here." - | ^~~~~~~ -lib/canonicalize.c:485:5: warning: #warning "See ." [-Wcpp] - 485 | #warning "See ." - | ^~~~~~~ lib/careadlinkat.c: In function 'careadlinkat': lib/careadlinkat.c:178:5: warning: #warning "GCC might issue a bogus -Wreturn-local-addr warning here." [-Wcpp] 178 | #warning "GCC might issue a bogus -Wreturn-local-addr warning here." @@ -2574,6 +2617,13 @@ 181 | char stack_buf[STACK_BUF_SIZE]; | ^~~~~~~~~ gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-close-stream.o `test -f 'lib/close-stream.c' || echo './'`lib/close-stream.c +lib/canonicalize.c: In function 'canonicalize_filename_mode': +lib/canonicalize.c:484:5: warning: #warning "GCC might issue a bogus -Wreturn-local-addr warning here." [-Wcpp] + 484 | #warning "GCC might issue a bogus -Wreturn-local-addr warning here." + | ^~~~~~~ +lib/canonicalize.c:485:5: warning: #warning "See ." [-Wcpp] + 485 | #warning "See ." + | ^~~~~~~ gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-closeout.o `test -f 'lib/closeout.c' || echo './'`lib/closeout.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-concat-filename.o `test -f 'lib/concat-filename.c' || echo './'`lib/concat-filename.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-dirname.o `test -f 'lib/dirname.c' || echo './'`lib/dirname.c @@ -2605,20 +2655,6 @@ gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_list.o `test -f 'lib/gl_list.c' || echo './'`lib/gl_list.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-localcharset.o `test -f 'lib/localcharset.c' || echo './'`lib/localcharset.c gcc -DEXEEXT=\"\" -I. 
-I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/glthread/libbison_a-lock.o `test -f 'lib/glthread/lock.c' || echo './'`lib/glthread/lock.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_map.o `test -f 'lib/gl_map.c' || echo './'`lib/gl_map.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-math.o `test -f 'lib/math.c' || echo './'`lib/math.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-mbchar.o `test -f 'lib/mbchar.c' || echo './'`lib/mbchar.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-mbfile.o `test -f 'lib/mbfile.c' || echo './'`lib/mbfile.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-mbswidth.o `test -f 'lib/mbswidth.c' || echo './'`lib/mbswidth.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_oset.o `test -f 'lib/gl_oset.c' || echo './'`lib/gl_oset.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-pipe2.o `test -f 'lib/pipe2.c' || echo './'`lib/pipe2.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-pipe2-safer.o `test -f 'lib/pipe2-safer.c' || echo './'`lib/pipe2-safer.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-printf-frexp.o `test -f 'lib/printf-frexp.c' || echo './'`lib/printf-frexp.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-printf-frexpl.o `test -f 'lib/printf-frexpl.c' || echo './'`lib/printf-frexpl.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-progname.o `test -f 'lib/progname.c' || echo './'`lib/progname.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-quotearg.o `test -f 'lib/quotearg.c' || echo './'`lib/quotearg.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_rbtree_oset.o `test -f 'lib/gl_rbtree_oset.c' || echo './'`lib/gl_rbtree_oset.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_rbtreehash_list.o `test -f 'lib/gl_rbtreehash_list.c' || echo './'`lib/gl_rbtreehash_list.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/malloc/libbison_a-scratch_buffer_dupfree.o `test -f 'lib/malloc/scratch_buffer_dupfree.c' || echo './'`lib/malloc/scratch_buffer_dupfree.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/malloc/libbison_a-scratch_buffer_grow.o `test -f 'lib/malloc/scratch_buffer_grow.c' || echo './'`lib/malloc/scratch_buffer_grow.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o lib/malloc/libbison_a-scratch_buffer_grow_preserve.o `test -f 'lib/malloc/scratch_buffer_grow_preserve.c' || echo './'`lib/malloc/scratch_buffer_grow_preserve.c @@ -2640,8 +2676,8 @@ sed -e 's,^Usage: .*/bison \[OPTION\],Usage: bison [OPTION],g' \ -e '/translation bugs/d' >>doc/bison.help.tmp ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help -if /bin/bash '/build/reproducible-path/bison-3.8.2+dfsg/build-aux/missing' help2man --version >/dev/null 2>&1; then \ - /bin/bash '/build/reproducible-path/bison-3.8.2+dfsg/build-aux/missing' help2man \ +if /bin/sh '/build/reproducible-path/bison-3.8.2+dfsg/build-aux/missing' help2man --version >/dev/null 2>&1; then \ + /bin/sh '/build/reproducible-path/bison-3.8.2+dfsg/build-aux/missing' help2man \ --include=./doc/bison.x \ --output=doc/bison.1.tmp tests/bison && \ { sed 's/^\(\.TH[^"]*"[^"]*"[^"]*\)"[^"]*"/\1/' doc/bison.1 >doc/bison.1a.tmp || true; } && \ @@ -2669,7 +2705,7 @@ make[2]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg' make[1]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg' dh_auto_test - make -j8 check "TESTSUITEFLAGS=-j8 --verbose" VERBOSE=1 + make -j21 check "TESTSUITEFLAGS=-j21 --verbose" VERBOSE=1 make[1]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' if test -d ./.git \ && git --version >/dev/null 2>&1; then \ @@ -2706,89 +2742,88 @@ ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help make examples/c/calc/calc examples/c/glr/c++-types examples/c/lexcalc/lexcalc examples/c/mfcalc/mfcalc examples/c/pushcalc/calc examples/c/reccalc/reccalc examples/c/rpcalc/rpcalc examples/c++/calc++/calc++ examples/c++/glr/c++-types examples/c++/simple examples/c++/variant examples/c++/variant-11 ./tests/bison tests/atconfig tests/atlocal make[4]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' -/bin/bash ./build-aux/ylwrap examples/c/calc/calc.y y.tab.c examples/c/calc/calc.c y.tab.h `echo examples/c/calc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/calc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -/bin/bash ./build-aux/ylwrap examples/c/glr/c++-types.y y.tab.c examples/c/glr/c++-types.c y.tab.h `echo examples/c/glr/c++-types.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/glr/c++-types.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -/bin/bash ./build-aux/ylwrap examples/c/lexcalc/parse.y y.tab.c examples/c/lexcalc/parse.c y.tab.h `echo examples/c/lexcalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/lexcalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/calc/calc.y y.tab.c examples/c/calc/calc.c y.tab.h `echo examples/c/calc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/calc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/glr/c++-types.y y.tab.c examples/c/glr/c++-types.c y.tab.h `echo examples/c/glr/c++-types.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/glr/c++-types.output -- ./tests/bison -o y.tab.c --defines -Werror 
-Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/lexcalc/parse.y y.tab.c examples/c/lexcalc/parse.c y.tab.h `echo examples/c/lexcalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/lexcalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines \ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c/lexcalc/scan.l' || echo './'`examples/c/lexcalc/scan.l lex.yy.c examples/c/lexcalc/scan.c -- flex -/bin/bash ./build-aux/ylwrap examples/c/mfcalc/mfcalc.y y.tab.c examples/c/mfcalc/mfcalc.c y.tab.h `echo examples/c/mfcalc/mfcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/mfcalc/mfcalc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -/bin/bash ./build-aux/ylwrap examples/c/pushcalc/calc.y y.tab.c examples/c/pushcalc/calc.c y.tab.h `echo examples/c/pushcalc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/pushcalc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -/bin/bash ./build-aux/ylwrap examples/c/reccalc/parse.y y.tab.c examples/c/reccalc/parse.c y.tab.h `echo examples/c/reccalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/reccalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -/bin/bash ./build-aux/ylwrap examples/c/rpcalc/rpcalc.y y.tab.c examples/c/rpcalc/rpcalc.c y.tab.h `echo examples/c/rpcalc/rpcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/rpcalc/rpcalc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap `test -f 'examples/c/lexcalc/scan.l' || echo './'`examples/c/lexcalc/scan.l lex.yy.c examples/c/lexcalc/scan.c -- flex +/bin/sh ./build-aux/ylwrap examples/c/mfcalc/mfcalc.y y.tab.c examples/c/mfcalc/mfcalc.c y.tab.h `echo examples/c/mfcalc/mfcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/mfcalc/mfcalc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/pushcalc/calc.y y.tab.c examples/c/pushcalc/calc.c y.tab.h `echo examples/c/pushcalc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/pushcalc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/reccalc/parse.y y.tab.c examples/c/reccalc/parse.c y.tab.h `echo examples/c/reccalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/reccalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/rpcalc/rpcalc.y y.tab.c examples/c/rpcalc/rpcalc.c y.tab.h `echo examples/c/rpcalc/rpcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/rpcalc/rpcalc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines rm -f examples/c++/calc++/parser.stamp touch examples/c++/calc++/parser.stamp.tmp -./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias 
--report=all --no-lines -o examples/c++/calc++/parser.cc examples/c++/calc++/parser.yy -updating examples/c/pushcalc/calc.output -updating examples/c/calc/calc.output -updating examples/c/glr/c++-types.output -updating examples/c/pushcalc/calc.h -updating examples/c/calc/calc.h \ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/calc++/scanner.ll' || echo './'`examples/c++/calc++/scanner.ll lex.yy.c examples/c++/calc++/scanner.cc -- flex -updating examples/c/rpcalc/rpcalc.output -updating examples/c/mfcalc/mfcalc.output +/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/calc++/scanner.ll' || echo './'`examples/c++/calc++/scanner.ll lex.yy.c examples/c++/calc++/scanner.cc -- flex rm -f examples/c++/glr/c++-types.stamp +./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -o examples/c++/calc++/parser.cc examples/c++/calc++/parser.yy touch examples/c++/glr/c++-types.stamp.tmp ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -o examples/c++/glr/c++-types.cc examples/c++/glr/c++-types.yy -updating examples/c/glr/c++-types.h -mv -f examples/c++/calc++/parser.stamp.tmp examples/c++/calc++/parser.stamp -updating examples/c/mfcalc/mfcalc.h -updating examples/c/rpcalc/rpcalc.h \ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/simple.yy' || echo './'`examples/c++/simple.yy y.tab.c examples/c++/simple.cc y.tab.h `echo examples/c++/simple.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/simple.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -updating examples/c/reccalc/parse.output +/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/simple.yy' || echo './'`examples/c++/simple.yy y.tab.c examples/c++/simple.cc y.tab.h `echo examples/c++/simple.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/simple.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines \ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/variant.yy' || echo './'`examples/c++/variant.yy y.tab.c examples/c++/variant.cc y.tab.h `echo examples/c++/variant.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/variant.yy' || echo './'`examples/c++/variant.yy y.tab.c examples/c++/variant.cc y.tab.h `echo examples/c++/variant.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines \ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/variant-11.yy' || echo './'`examples/c++/variant-11.yy y.tab.c examples/c++/variant-11.cc y.tab.h `echo examples/c++/variant-11.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant-11.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/variant-11.yy' || echo './'`examples/c++/variant-11.yy y.tab.c examples/c++/variant-11.cc y.tab.h `echo examples/c++/variant-11.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant-11.output -- ./tests/bison -o y.tab.c --defines -Werror 
-Wall,dangling-alias --report=all --no-lines make[4]: 'tests/bison' is up to date. make[4]: Nothing to be done for 'tests/atconfig'. make[4]: 'tests/atlocal' is up to date. -updating examples/c/lexcalc/parse.output -updating examples/c/reccalc/parse.h +updating examples/c/rpcalc/rpcalc.output +mv -f examples/c++/calc++/parser.stamp.tmp examples/c++/calc++/parser.stamp +updating examples/c/calc/calc.output +g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-driver.o `test -f 'examples/c++/calc++/driver.cc' || echo './'`examples/c++/calc++/driver.cc +g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-scanner.o `test -f 'examples/c++/calc++/scanner.cc' || echo './'`examples/c++/calc++/scanner.cc +g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-calc++.o `test -f 'examples/c++/calc++/calc++.cc' || echo './'`examples/c++/calc++/calc++.cc +g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-parser.o `test -f 'examples/c++/calc++/parser.cc' || echo './'`examples/c++/calc++/parser.cc +updating examples/c/pushcalc/calc.output +updating examples/c/rpcalc/rpcalc.h +updating examples/c/glr/c++-types.output +gcc -DEXEEXT=\"\" -I./examples/c/rpcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/rpcalc/examples_c_rpcalc_rpcalc-rpcalc.o `test -f 'examples/c/rpcalc/rpcalc.c' || echo './'`examples/c/rpcalc/rpcalc.c +updating examples/c/calc/calc.h +updating examples/c/pushcalc/calc.h gcc -DEXEEXT=\"\" -I./examples/c/calc -I./examples/c/calc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/calc/examples_c_calc_calc-calc.o `test -f 'examples/c/calc/calc.c' || echo './'`examples/c/calc/calc.c -gcc -DEXEEXT=\"\" -I./examples/c/glr -I./examples/c/glr -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/glr/examples_c_glr_c___types-c++-types.o `test -f 'examples/c/glr/c++-types.c' || echo './'`examples/c/glr/c++-types.c -updating examples/c/lexcalc/parse.h -gcc -DEXEEXT=\"\" -I./examples/c/mfcalc -I./examples/c/mfcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/mfcalc/examples_c_mfcalc_mfcalc-mfcalc.o `test -f 'examples/c/mfcalc/mfcalc.c' || echo './'`examples/c/mfcalc/mfcalc.c gcc -DEXEEXT=\"\" -I./examples/c/pushcalc -I./examples/c/pushcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/pushcalc/examples_c_pushcalc_calc-calc.o `test -f 'examples/c/pushcalc/calc.c' || echo './'`examples/c/pushcalc/calc.c -mv -f examples/c++/glr/c++-types.stamp.tmp examples/c++/glr/c++-types.stamp -gcc -DEXEEXT=\"\" -I./examples/c/reccalc -I./examples/c/reccalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/reccalc/examples_c_reccalc_reccalc-parse.o `test -f 'examples/c/reccalc/parse.c' || echo './'`examples/c/reccalc/parse.c updating examples/c++/simple.output +updating examples/c/lexcalc/parse.output +updating examples/c/glr/c++-types.h +gcc -DEXEEXT=\"\" -I./examples/c/glr -I./examples/c/glr -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/glr/examples_c_glr_c___types-c++-types.o `test -f 'examples/c/glr/c++-types.c' || echo './'`examples/c/glr/c++-types.c +updating examples/c/reccalc/parse.output updating examples/c++/simple.hh +g++ -DEXEEXT=\"\" -I. -Wdate-time -D_FORTIFY_SOURCE=2 -std=c++11 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/simple-simple.o `test -f 'examples/c++/simple.cc' || echo './'`examples/c++/simple.cc +updating examples/c/lexcalc/parse.h +gcc -DEXEEXT=\"\" -I./examples/c/lexcalc -I./examples/c/lexcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/lexcalc/examples_c_lexcalc_lexcalc-parse.o `test -f 'examples/c/lexcalc/parse.c' || echo './'`examples/c/lexcalc/parse.c +updating examples/c/mfcalc/mfcalc.output +gcc -DEXEEXT=\"\" -I./examples/c/lexcalc -I./examples/c/lexcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/lexcalc/examples_c_lexcalc_lexcalc-scan.o `test -f 'examples/c/lexcalc/scan.c' || echo './'`examples/c/lexcalc/scan.c +updating examples/c/reccalc/parse.h +gcc -DEXEEXT=\"\" -I./examples/c/reccalc -I./examples/c/reccalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/reccalc/examples_c_reccalc_reccalc-parse.o `test -f 'examples/c/reccalc/parse.c' || echo './'`examples/c/reccalc/parse.c gcc -DEXEEXT=\"\" -I./examples/c/reccalc -I./examples/c/reccalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/reccalc/examples_c_reccalc_reccalc-scan.o `test -f 'examples/c/reccalc/scan.c' || echo './'`examples/c/reccalc/scan.c +updating examples/c/mfcalc/mfcalc.h +gcc -DEXEEXT=\"\" -I./examples/c/mfcalc -I./examples/c/mfcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/mfcalc/examples_c_mfcalc_mfcalc-mfcalc.o `test -f 'examples/c/mfcalc/mfcalc.c' || echo './'`examples/c/mfcalc/mfcalc.c updating examples/c++/variant.output +mv -f examples/c++/glr/c++-types.stamp.tmp examples/c++/glr/c++-types.stamp updating examples/c++/variant-11.output -updating examples/c++/variant.hh -gcc -DEXEEXT=\"\" -I./examples/c/rpcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/rpcalc/examples_c_rpcalc_rpcalc-rpcalc.o `test -f 'examples/c/rpcalc/rpcalc.c' || echo './'`examples/c/rpcalc/rpcalc.c +g++ -DEXEEXT=\"\" -I./examples/c++/glr -I./examples/c++/glr -Wdate-time -D_FORTIFY_SOURCE=2 -std=c++14 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/glr/examples_c___glr_c___types-c++-types.o `test -f 'examples/c++/glr/c++-types.cc' || echo './'`examples/c++/glr/c++-types.cc updating examples/c++/variant-11.hh -g++ -DEXEEXT=\"\" -I. -Wdate-time -D_FORTIFY_SOURCE=2 -std=c++11 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/simple-simple.o `test -f 'examples/c++/simple.cc' || echo './'`examples/c++/simple.cc -g++ -DEXEEXT=\"\" -I. -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/variant-variant.o `test -f 'examples/c++/variant.cc' || echo './'`examples/c++/variant.cc +updating examples/c++/variant.hh g++ -DEXEEXT=\"\" -I. -Wdate-time -D_FORTIFY_SOURCE=2 -std=c++11 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/variant_11-variant-11.o `test -f 'examples/c++/variant-11.cc' || echo './'`examples/c++/variant-11.cc -gcc -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/calc/calc examples/c/calc/examples_c_calc_calc-calc.o -gcc -DEXEEXT=\"\" -I./examples/c/lexcalc -I./examples/c/lexcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/lexcalc/examples_c_lexcalc_lexcalc-parse.o `test -f 'examples/c/lexcalc/parse.c' || echo './'`examples/c/lexcalc/parse.c -gcc -DEXEEXT=\"\" -I./examples/c/lexcalc -I./examples/c/lexcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/lexcalc/examples_c_lexcalc_lexcalc-scan.o `test -f 'examples/c/lexcalc/scan.c' || echo './'`examples/c/lexcalc/scan.c -gcc -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/mfcalc/mfcalc examples/c/mfcalc/examples_c_mfcalc_mfcalc-mfcalc.o -lm
+g++ -DEXEEXT=\"\" -I. -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/variant-variant.o `test -f 'examples/c++/variant.cc' || echo './'`examples/c++/variant.cc
+gcc -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/rpcalc/rpcalc examples/c/rpcalc/examples_c_rpcalc_rpcalc-rpcalc.o -lm
 gcc -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/pushcalc/calc examples/c/pushcalc/examples_c_pushcalc_calc-calc.o
+gcc -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/mfcalc/mfcalc examples/c/mfcalc/examples_c_mfcalc_mfcalc-mfcalc.o -lm
+gcc -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/calc/calc examples/c/calc/examples_c_calc_calc-calc.o
+gcc -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/lexcalc/lexcalc examples/c/lexcalc/examples_c_lexcalc_lexcalc-parse.o examples/c/lexcalc/examples_c_lexcalc_lexcalc-scan.o
 gcc -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/reccalc/reccalc examples/c/reccalc/examples_c_reccalc_reccalc-parse.o examples/c/reccalc/examples_c_reccalc_reccalc-scan.o
-gcc -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/rpcalc/rpcalc examples/c/rpcalc/examples_c_rpcalc_rpcalc-rpcalc.o -lm
-g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-driver.o `test -f 'examples/c++/calc++/driver.cc' || echo './'`examples/c++/calc++/driver.cc
-g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-scanner.o `test -f 'examples/c++/calc++/scanner.cc' || echo './'`examples/c++/calc++/scanner.cc
-g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-calc++.o `test -f 'examples/c++/calc++/calc++.cc' || echo './'`examples/c++/calc++/calc++.cc
-g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-parser.o `test -f 'examples/c++/calc++/parser.cc' || echo './'`examples/c++/calc++/parser.cc
-g++ -DEXEEXT=\"\" -I./examples/c++/glr -I./examples/c++/glr -Wdate-time -D_FORTIFY_SOURCE=2 -std=c++14 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/glr/examples_c___glr_c___types-c++-types.o `test -f 'examples/c++/glr/c++-types.cc' || echo './'`examples/c++/glr/c++-types.cc
 gcc -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/glr/c++-types examples/c/glr/examples_c_glr_c___types-c++-types.o
-gcc -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/lexcalc/lexcalc examples/c/lexcalc/examples_c_lexcalc_lexcalc-parse.o examples/c/lexcalc/examples_c_lexcalc_lexcalc-scan.o
 g++ -std=c++11 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c++/simple examples/c++/simple-simple.o
 g++ -std=c++11 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c++/variant-11 examples/c++/variant_11-variant-11.o
-g++ -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c++/variant examples/c++/variant-variant.o
 g++ -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c++/calc++/calc++ examples/c++/calc++/calc__-driver.o examples/c++/calc++/calc__-scanner.o examples/c++/calc++/calc__-calc++.o examples/c++/calc++/calc__-parser.o
+g++ -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c++/variant examples/c++/variant-variant.o
 g++ -std=c++14 -g -O2 -ffile-prefix-map=/build/reproducible-path/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c++/glr/c++-types examples/c++/glr/examples_c___glr_c___types-c++-types.o
 make[4]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg'
 make check-TESTS check-local
 make[4]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg'
 make all-recursive
 rm -f tests/package.m4 tests/package.m4.tmp
-make[5]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg'
 { \
 echo '# Signature of the current package.'; \
 echo 'm4_define([AT_PACKAGE_NAME], [GNU Bison])'; \
@@ -2799,7 +2834,8 @@
 } >tests/package.m4.tmp
 mv tests/package.m4.tmp tests/package.m4
 \
- /bin/bash '/build/reproducible-path/bison-3.8.2+dfsg/build-aux/missing' autom4te --language=autotest -I ./tests ./tests/testsuite.at -o tests/testsuite.tmp
+ /bin/sh '/build/reproducible-path/bison-3.8.2+dfsg/build-aux/missing' autom4te --language=autotest -I ./tests ./tests/testsuite.at -o tests/testsuite.tmp
+make[5]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg'
 make[5]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg'
 Making all in po
 make[6]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg/po'
@@ -2815,6 +2851,11 @@
 make[6]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg/gnulib-po'
 Making all in .
 make[6]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg'
+PASS: examples/c/glr/c++-types.test
+PASS: examples/c++/simple.test
+PASS: examples/c++/variant.test
+PASS: examples/c++/glr/c++-types.test
+PASS: examples/c++/variant-11.test
 /usr/bin/mkdir -p doc
 LC_ALL=C tests/bison --version >doc/bison.help.tmp
 LC_ALL=C tests/bison --help | \
@@ -2823,17 +2864,12 @@
 ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help
 make[6]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg'
 make[5]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg'
-PASS: examples/c/glr/c++-types.test
-PASS: examples/c/lexcalc/lexcalc.test
 PASS: examples/c/mfcalc/mfcalc.test
-PASS: examples/c/calc/calc.test
-PASS: examples/c/rpcalc/rpcalc.test
+PASS: examples/c/lexcalc/lexcalc.test
 PASS: examples/c/pushcalc/calc.test
-PASS: examples/c++/simple.test
-PASS: examples/c++/glr/c++-types.test
+PASS: examples/c/rpcalc/rpcalc.test
+PASS: examples/c/calc/calc.test
 PASS: examples/c/reccalc/reccalc.test
-PASS: examples/c++/variant.test
-PASS: examples/c++/variant-11.test
 PASS: examples/c++/calc++/calc++.test
 ============================================================================
 Testsuite summary for GNU Bison 3.8.2
@@ -2849,7 +2885,7 @@
 make[5]: Leaving directory '/build/reproducible-path/bison-3.8.2+dfsg'
 "/usr/bin/perl" -pi -e 's/\@tb\@/\t/g' tests/testsuite.tmp
 mv tests/testsuite.tmp tests/testsuite
-/bin/bash ./tests/testsuite -C tests -j8 --verbose
+/bin/sh ./tests/testsuite -C tests -j21 --verbose
 ## --------------------------- ##
 ## GNU Bison 3.8.2 test suite. ##
 ## --------------------------- ##
@@ -2861,24 +2897,42 @@
-1. m4.at:21: testing Generating Comments ...
-3. input.at:58: testing Invalid options ...
+
+
+
+
+
+
+
+
+
 2. input.at:27: testing Invalid number of arguments ...
./input.at:29: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -./input.at:67: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -ferror=caret input.y -7. input.at:204: testing Yacc warnings ... + +1. m4.at:21: testing Generating Comments ... 4. input.at:83: testing Invalid inputs ... ./input.at:97: "$PERL" -pi -e 's/\\(\d{3})/chr(oct($1))/ge' input.y || exit 77 +3. input.at:58: testing Invalid options ... +./input.at:67: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -ferror=caret input.y 5. input.at:147: testing Invalid inputs with {} ... + + ./input.at:162: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y + +7. input.at:204: testing Yacc warnings ... +9. input.at:287: testing Invalid symbol declarations ... +6. input.at:173: testing Yacc warnings on symbols ... 8. input.at:238: testing Yacc's %type ... -./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wyacc input.y ./m4.at:53: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -S ./input.m4 input.y -./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wyacc input.y -6. input.at:173: testing Yacc warnings on symbols ... -./input.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y 2.y ./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wyacc input.y +./input.at:304: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wyacc input.y +10. input.at:341: testing Redefining the error token ... stderr: +11. input.at:401: testing Dangling aliases ... 
+./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wdangling input.y +./input.at:99: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wyacc input.y bison: invalid argument 'error=caret' for '--feature' Valid arguments are: - 'none' @@ -2886,10 +2940,28 @@ - 'fixit', 'diagnostics-parseable-fixits' - 'syntax-only' - 'all' -./input.at:99: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +12. input.at:427: testing Symbol declarations ... +13. input.at:528: testing Invalid $n and @n ... +./input.at:536: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y ./input.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=error=itemsets input.y -./input.at:42: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --skeleton -5. input.at:147: ok +./input.at:467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S./dump-symbols.m4 input.y +19. input.at:859: testing Symbol class redefinition ... +14. input.at:552: testing Type Clashes ... +./input.at:871: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y 2.y +20. input.at:899: testing Default %printer and %destructor redeclared ... +16. input.at:784: testing Unused values before symbol declarations ... +./input.at:565: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +21. input.at:970: testing Per-type %printer and %destructor redeclared ... 
+./input.at:987: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +15. input.at:774: testing Unused values ... +5. input.at:147: stderr: + ok +./input.at:959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +17. input.at:794: testing Symbol redeclared ... +./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y stderr: input.y:1.11: error: invalid null character 1 | %header "ð€ˆ" @@ -2924,7 +2996,8 @@ input.y:10.1-11.0: error: missing '%}' at end of file 10 | %{ | ^~ -stderr: +./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +18. input.at:832: testing EOF redeclared ... ./input.at:104: "$PERL" -p -e 's{([\0\200\210\360\377])}{sprintf "\\x%02x", ord($1)}ge' stderr bison: invalid argument 'error=itemsets' for '--report' Valid arguments are: @@ -2935,14 +3008,15 @@ - 'solved' - 'counterexamples', 'cex' - 'all' +./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y ./input.at:72: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror?all input.y +9. input.at:287: ok +13. input.at:528: ./input.at:42: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --skeleton + ok +./input.at:390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +14. input.at:552: ok stderr: -bison: option '--skeleton' requires an argument -Try 'bison --help' for more information. -./input.at:43: sed -e \ - "s/requires an argument -- skeleton/'--skeleton' requires an argument/" \ - stderr -stderr: +19. input.at:859: ok bison: invalid argument 'error?all' for '--warning' Valid arguments are: - 'all' @@ -2958,44 +3032,84 @@ - 'other' - 'precedence' - 'yacc' -3. input.at:58: 4. input.at:83: ok - ok - -2. input.at:27: ./m4.at:55: cat output.txt +21. input.at:970: ok +stderr: +3. input.at:58: 4. input.at:83: ok -9. input.at:287: testing Invalid symbol declarations ... -1. m4.at:21: ./input.at:304: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y ok +bison: option '--skeleton' requires an argument +Try 'bison --help' for more information. +./input.at:960: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./m4.at:55: cat output.txt +./input.at:43: sed -e \ + "s/requires an argument -- skeleton/'--skeleton' requires an argument/" \ + stderr + -9. input.at:287: ok +22. input.at:1013: testing Undefined symbols ... 
+./input.at:1023: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +2. input.at:27: ok -11. input.at:401: testing Dangling aliases ... -./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wdangling input.y -13. input.at:528: testing Invalid $n and @n ... -./input.at:536: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -12. input.at:427: testing Symbol declarations ... -./input.at:467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S./dump-symbols.m4 input.y -13. input.at:528: ok -10. input.at:341: testing Redefining the error token ... -./input.at:354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y + +20. input.at:899: ok + +23. input.at:1045: testing Unassociated types used for a printer or destructor ... +22. input.at:1013: ok +./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +25. input.at:1139: testing Unused values with default %destructor ... +24. input.at:1074: testing Useless printers or destructors ... +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +27. input.at:1219: testing Duplicate string ... +./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +1. m4.at:21: ok +26. input.at:1187: testing Unused values with per-type %destructor ... + +./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y + +29. input.at:1275: testing Incompatible Aliases ... +./input.at:1285: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y ./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror +./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y +28. input.at:1247: testing Token collisions ... +./input.at:1256: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -14. input.at:552: testing Type Clashes ... 
-./input.at:565: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +28. input.at:1247: ok +./input.at:1299: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +30. input.at:1400: testing Torturing the Scanner ... +./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y -Werror +./input.at:1407: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror +32. input.at:1609: testing Require 1.0 ... ./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror -15. input.at:774: testing Unused values ... -./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +./input.at:1609: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./input.at:1313: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +31. input.at:1569: testing Typed symbol aliases ... +33. input.at:1610: testing Require 3.8.2 ... +./input.at:1610: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./input.at:390: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./input.at:1586: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror + +./input.at:1554: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y ./input.at:468: cat symbols.csv -14. 
input.at:552: ok +./input.at:1327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:1344: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y 12. input.at:427: ok -./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror -stderr: -./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y -Werror +./input.at:1359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +34. input.at:1612: testing Require 100.0 ... +./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:1612: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y +stderr: +./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +stderr: +35. input.at:1619: testing String aliases for character tokens ... input.y:1.1-6: error: POSIX Yacc does not support %nterm [-Werror=yacc] 1 | %nterm exp | ^~~~~~ @@ -3008,36 +3122,53 @@ input.y:4.6-13: error: POSIX Yacc does not support string literals [-Werror=yacc] 4 | exp: "number"; | ^~~~~~~~ -16. input.at:784: testing Unused values before symbol declarations ... -./input.at:390: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y - -./input.at:182: sed 's,.*/$,,' stderr 1>&2 -./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error -17. input.at:794: testing Symbol redeclared ... 
-./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -stderr: -input.y:1.1-11: error: POSIX Yacc does not support %destructor [-Werror=yacc] - 1 | %destructor {} - | ^~~~~~~~~~~ -input.y:2.1-8: error: POSIX Yacc does not support %printer [-Werror=yacc] - 2 | %printer {} - | ^~~~~~~~ -input.y:6.9-20: error: POSIX Yacc does not support typed midrule actions [-Werror=yacc] - 6 | a: { $$ = 42; } { $$ = $1; }; - | ^~~~~~~~~~~~ -input.y:7.4-9: error: POSIX Yacc does not support %empty [-Werror=yacc] - 7 | b: %empty { $$ = 42; }; - | ^~~~~~ +./input.at:1632: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +33. input.at:1610: stderr: + ok +./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y -Werror +input.y:9.10-16: error: require bison 100.0, but have 3.8.2 +34. input.at:1612: ok stderr: +./input.at:182: sed 's,.*/$,,' stderr 1>&2 input.y:2.13-17: error: string literal "bar" not attached to a symbol [-Werror=dangling-alias] 2 | %type "bar" | ^~~~~ input.y:4.19-23: error: string literal "baz" not attached to a symbol [-Werror=dangling-alias] 4 | expr: "foo" "bar" "baz" | ^~~~~ -./input.at:216: sed 's,.*/$,,' stderr 1>&2 stderr: +input.y:1.12-14: error: symbol FOO redeclared [-Werror=other] + 1 | %token FOO FOO + | ^~~ +input.y:1.8-10: note: previous declaration + 1 | %token FOO FOO + | ^~~ +input.y:2.15-17: error: symbol BAR redeclared [-Werror=other] + 2 | %token BAR 12 BAR 12 + | ^~~ +input.y:2.8-10: note: previous declaration + 2 | %token BAR 12 BAR 12 + | ^~~ +input.y:3.14-16: error: symbol EOF redeclared [-Werror=other] + 3 | %token EOF 0 EOF 0 + | ^~~ +input.y:3.8-10: note: previous declaration + 3 | %token EOF 0 EOF 0 + | ^~~ +29. input.at:1275: ok + +stderr: +input.y:1.16-18: error: symbol FOO redeclared [-Werror=other] + 1 | %token FOO BAR FOO 0 + | ^~~ +input.y:1.8-10: note: previous declaration + 1 | %token FOO BAR FOO 0 + | ^~~ +stderr: + +31. 
input.at:1569: stderr: + ok +./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error input.y:2.1-6: error: POSIX Yacc does not support %nterm [-Werror=yacc] 2 | %nterm nterm1 | ^~~~~~ @@ -3062,325 +3193,54 @@ input.y:10.9-16: error: POSIX Yacc does not support string literals [-Werror=yacc] 10 | nterm3: "TOKEN3" | ^~~~~~~~ -./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error -./input.at:253: sed 's,.*/$,,' stderr 1>&2 ./input.at:410: sed 's,.*/$,,' stderr 1>&2 -./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error -./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y --warnings=error -./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none -./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none -./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y -Wnone,none -Werror --trace=none +32. 
input.at:1609: ok stderr: -input.y:1.12-14: error: symbol FOO redeclared [-Werror=other] - 1 | %token FOO FOO - | ^~~ -input.y:1.8-10: note: previous declaration - 1 | %token FOO FOO - | ^~~ -input.y:2.15-17: error: symbol BAR redeclared [-Werror=other] - 2 | %token BAR 12 BAR 12 - | ^~~ -input.y:2.8-10: note: previous declaration - 2 | %token BAR 12 BAR 12 - | ^~~ -input.y:3.14-16: error: symbol EOF redeclared [-Werror=other] - 3 | %token EOF 0 EOF 0 - | ^~~ -input.y:3.8-10: note: previous declaration - 3 | %token EOF 0 EOF 0 - | ^~~ -./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none ./input.at:804: sed 's,.*/$,,' stderr 1>&2 -./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none +input.y:1.1-11: error: POSIX Yacc does not support %destructor [-Werror=yacc] + 1 | %destructor {} + | ^~~~~~~~~~~ +input.y:2.1-8: error: POSIX Yacc does not support %printer [-Werror=yacc] + 2 | %printer {} + | ^~~~~~~~ +input.y:6.9-20: error: POSIX Yacc does not support typed midrule actions [-Werror=yacc] + 6 | a: { $$ = 42; } { $$ = $1; }; + | ^~~~~~~~~~~~ +input.y:7.4-9: error: POSIX Yacc does not support %empty [-Werror=yacc] + 7 | b: %empty { $$ = 42; }; + | ^~~~~~ +./input.at:843: sed 's,.*/$,,' stderr 1>&2 +./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:253: sed 's,.*/$,,' stderr 1>&2 +./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y --warnings=error +37. input.at:1708: testing Numbered tokens ... +36. input.at:1642: testing Symbols ... 
+./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --yacc input.y +./input.at:1720: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret redecl.y +./input.at:216: sed 's,.*/$,,' stderr 1>&2 ./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y --warnings=none -Werror --trace=none -./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none -./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -stderr: -stdout: -./input.at:391: $PREPARSER ./input -stderr: -./input.at:391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none -./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -10. input.at:341: ok -6. input.at:173: ok - -./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -11. input.at:401: ok - -18. input.at:832: testing EOF redeclared ... -19. input.at:859: testing Symbol class redefinition ... -./input.at:871: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y - -7. input.at:204: ok -./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -20. input.at:899: testing Default %printer and %destructor redeclared ... -./input.at:959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y - -19. input.at:859: ok -./input.at:960: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -21. 
input.at:970: testing Per-type %printer and %destructor redeclared ... -./input.at:987: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -20. input.at:899: ok -8. input.at:238: ok -21. input.at:970: ok - - -./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none - -./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -24. input.at:1074: testing Useless printers or destructors ... -23. input.at:1045: testing Unassociated types used for a printer or destructor ... - -./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -25. input.at:1139: testing Unused values with default %destructor ... -22. input.at:1013: testing Undefined symbols ... -./input.at:1023: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -22. input.at:1013: ok -17. input.at:794: ok -stderr: -input.y:1.16-18: error: symbol FOO redeclared [-Werror=other] - 1 | %token FOO BAR FOO 0 - | ^~~ -input.y:1.8-10: note: previous declaration - 1 | %token FOO BAR FOO 0 - | ^~~ -./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -26. input.at:1187: testing Unused values with per-type %destructor ... 
-./input.at:843: sed 's,.*/$,,' stderr 1>&2 -./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y stderr: -input.y:12.10-32: error: unset value: $$ [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~ -input.y:12.10-12: error: unused value: $1 [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~ -input.y:12.18-20: error: unused value: $3 [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~ -input.y:12.26-28: error: unused value: $5 [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~ -input.y:13.10-15: error: empty rule for typed nonterminal, and no action [-Werror=other] - 13 | b: INT | %empty; - | ^~~~~~ -input.y:14.10-62: error: unset value: $$ [-Werror=other] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:14.22-24: error: unused value: $3 [-Werror=other] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~ -input.y:14.43-45: error: unused value: $5 [-Werror=other] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~ -input.y:15.10-49: error: unset value: $$ [-Werror=other] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:15.18-20: error: unused value: $3 [-Werror=other] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~ -input.y:15.30-32: error: unused value: $5 [-Werror=other] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~ -input.y:16.10-37: error: unset value: $$ [-Werror=other] - 16 | e: INT | INT { } INT { } INT { $1; }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:16.18-20: error: unused value: $3 [-Werror=other] - 16 | e: INT | INT { } INT { } INT { $1; }; - | ^~~ -input.y:16.27-29: error: unused value: $5 [-Werror=other] - 16 | e: INT | INT { } INT { } INT { $1; }; - | ^~~ -input.y:18.10-58: error: unset value: $$ [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:18.10-12: error: unused value: $1 [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~ -input.y:18.31-33: error: unused value: $3 [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~ -input.y:18.52-54: error: unused value: $5 [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~ -input.y:19.10-72: error: unset value: $$ [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:19.10-12: error: unused value: $1 [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~ -input.y:19.31-33: error: unused value: $3 [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~ -input.y:19.66-68: error: unused value: $5 [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~ -input.y:22.10-68: error: unset value: $$ [-Werror=other] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:22.10-12: error: unused value: $1 [-Werror=other] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~ -input.y:22.14-16: error: unused value: $2 
[-Werror=other] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~ -input.y:25.23-25: error: unset value: $$ [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.40-42: error: unset value: $$ [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.10-50: error: unset value: $$ [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:25.10-12: error: unused value: $1 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.23-25: error: unused value: $2 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.27-29: error: unused value: $3 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.40-42: error: unused value: $4 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.44-46: error: unused value: $5 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:26.23-25: error: unset value: $$ [-Werror=other] - 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; - | ^~~ -input.y:26.40-42: error: unset value: $$ [-Werror=other] - 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; - | ^~~ -./input.at:775: sed 's,.*/$,,' stderr 1>&2 -27. input.at:1219: testing Duplicate string ... +input.y:6.8-22: error: unset value: $$ [-Werror=other] + 6 | start: end end { $1; } ; + | ^~~~~~~~~~~~~~~ +input.y:6.12-14: error: unused value: $2 [-Werror=other] + 6 | start: end end { $1; } ; + | ^~~ +input.y:7.6-8: error: unset value: $$ [-Werror=other] + 7 | end: { } ; + | ^~~ ./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y -./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -stderr: -./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -stderr: -input.y:4.22-28: error: type is used, but is not associated to any symbol [-Werror=other] -input.y:5.25-31: error: type is used, but is not associated to any symbol [-Werror=other] -input.y:12.10-32: error: unset value: $$ [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~ -input.y:12.10-12: error: unused value: $1 [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~ -input.y:12.18-20: error: unused value: $3 [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; 
- | ^~~ -input.y:12.26-28: error: unused value: $5 [-Werror=other] - 12 | a: INT | INT { } INT { } INT { }; - | ^~~ -input.y:13.10-15: error: empty rule for typed nonterminal, and no action [-Werror=other] - 13 | b: INT | %empty; - | ^~~~~~ -input.y:14.10-62: error: unset value: $$ [-Werror=other] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:14.22-24: error: unused value: $3 [-Werror=other] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~ -input.y:14.43-45: error: unused value: $5 [-Werror=other] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~ -input.y:15.10-49: error: unset value: $$ [-Werror=other] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:15.18-20: error: unused value: $3 [-Werror=other] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~ -input.y:15.30-32: error: unused value: $5 [-Werror=other] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~ -input.y:16.10-37: error: unset value: $$ [-Werror=other] - 16 | e: INT | INT { } INT { } INT { $1; }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:16.18-20: error: unused value: $3 [-Werror=other] - 16 | e: INT | INT { } INT { } INT { $1; }; - | ^~~ -input.y:16.27-29: error: unused value: $5 [-Werror=other] - 16 | e: INT | INT { } INT { } INT { $1; }; - | ^~~ -input.y:18.10-58: error: unset value: $$ [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:18.10-12: error: unused value: $1 [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~ -input.y:18.31-33: error: unused value: $3 [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~ -input.y:18.52-54: error: unused value: $5 [-Werror=other] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~ -input.y:19.10-72: error: unset value: $$ [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:19.10-12: error: unused value: $1 [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~ -input.y:19.31-33: error: unused value: $3 [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~ -input.y:19.66-68: error: unused value: $5 [-Werror=other] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~ -input.y:22.10-68: error: unset value: $$ [-Werror=other] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:22.10-12: error: unused value: $1 [-Werror=other] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~ -input.y:22.14-16: error: unused value: $2 [-Werror=other] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~ -input.y:25.23-25: error: unset value: $$ [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.40-42: error: unset value: $$ [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.10-50: error: unset value: $$ [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:25.10-12: error: unused value: $1 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.23-25: error: unused value: $2 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.27-29: error: unused value: $3 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.40-42: 
error: unused value: $4 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:25.44-46: error: unused value: $5 [-Werror=other] - 25 | n: INT | INT { } INT { } INT { }; - | ^~~ -input.y:26.23-25: error: unset value: $$ [-Werror=other] - 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; - | ^~~ -input.y:26.40-42: error: unset value: $$ [-Werror=other] - 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; - | ^~~ + +./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error +39. input.at:1805: testing %start after first rule ... +38. input.at:1750: testing Unclosed constructs ... +./input.at:1779: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y +./input.at:1817: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./input.at:1199: sed 's,.*/$,,' stderr 1>&2 +./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error stderr: -./input.at:1062: sed 's,.*/$,,' stderr 1>&2 input.y:6.8-45: error: unset value: $$ [-Werror=other] 6 | start: end end tagged tagged { $1; $3; } ; | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3390,158 +3250,70 @@ input.y:7.6-8: error: unset value: $$ [-Werror=other] 7 | end: { } ; | ^~~ -./input.at:785: sed 's,.*/$,,' stderr 1>&2 -./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -./input.at:1152: sed 's,.*/$,,' stderr 1>&2 -./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y -Werror -stderr: -input.y:16.13-19: error: useless %printer for type [-Werror=other] -input.y:17.16-22: error: useless %destructor for type [-Werror=other] -./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./input.at:1085: sed 's,.*/$,,' stderr 1>&2 -stderr: -stderr: -input.y:6.11-14: error: symbol "<=" used more than once as a literal string [-Werror=other] -input.y:6.8-22: error: unset value: $$ [-Werror=other] - 6 
| start: end end { $1; } ; - | ^~~~~~~~~~~~~~~ -input.y:6.12-14: error: unused value: $2 [-Werror=other] - 6 | start: end end { $1; } ; - | ^~~ -input.y:7.6-8: error: unset value: $$ [-Werror=other] - 7 | end: { } ; - | ^~~ -./input.at:1236: sed 's,.*/$,,' stderr 1>&2 -./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y --warnings=error -./input.at:1199: sed 's,.*/$,,' stderr 1>&2 -./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -18. input.at:832: ok +38. input.at:1750: ok +35. input.at:1619: ok ./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error - -./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y -Wnone,none -Werror --trace=none -./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -28. input.at:1247: testing Token collisions ... -./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./input.at:1256: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -28. 
input.at:1247: ok -./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y --warnings=none -Werror --trace=none -./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none - -27. input.at:1219: ok -29. input.at:1275: testing Incompatible Aliases ... -./input.at:1285: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y - -23. input.at:1045: ok -./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -30. input.at:1400: testing Torturing the Scanner ... -./input.at:1299: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1407: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y - -./input.at:1313: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1554: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y -./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -31. input.at:1569: testing Typed symbol aliases ... -./input.at:1586: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./input.at:1327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -26. input.at:1187: ok - -./input.at:1344: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -32. input.at:1609: testing Require 1.0 ... 
-./input.at:1359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1609: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -31. input.at:1569: ok -./input.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y - -29. input.at:1275: ok +40. input.at:1826: testing Duplicate %start symbol ... stderr: -./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -32. input.at:1609: ok -33. input.at:1610: testing Require 3.8.2 ... -./input.at:1610: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +input.y:6.11-14: error: symbol "<=" used more than once as a literal string [-Werror=other] +./input.at:1152: sed 's,.*/$,,' stderr 1>&2 stderr: -input.y:6.23-28: error: unused value: $4 [-Werror=other] -input.y:8.9-11: error: unset value: $$ [-Werror=other] +input.y:4.22-28: error: type is used, but is not associated to any symbol [-Werror=other] +input.y:5.25-31: error: type is used, but is not associated to any symbol [-Werror=other] +./input.at:1236: sed 's,.*/$,,' stderr 1>&2 ./input.at:1555: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -34. input.at:1612: testing Require 100.0 ... -./input.at:1175: sed 's,.*/$,,' stderr 1>&2 -./input.at:1612: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -35. input.at:1619: testing String aliases for character tokens ... -./input.at:1632: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -stderr: -input.y:9.10-16: error: require bison 100.0, but have 3.8.2 -34. input.at:1612: ok -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -stderr: - -33. input.at:1610: ok -36. input.at:1642: testing Symbols ... 
-./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --yacc input.y -35. input.at:1619: ok - -stderr: -input.y:3.13-14: error: useless %printer for type <> [-Werror=other] -37. input.at:1708: testing Numbered tokens ... -./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none - -./input.at:1720: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret redecl.y -./input.at:1116: sed 's,.*/$,,' stderr 1>&2 -38. input.at:1750: testing Unclosed constructs ... -./input.at:1779: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -38. input.at:1750: ok -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:1062: sed 's,.*/$,,' stderr 1>&2 +./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none +./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y --warnings=error +42. input.at:1916: testing %prec's token must be defined ... +./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +39. input.at:1805: ok +./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y -Wnone,none -Werror --trace=none +41. 
input.at:1895: testing %prec takes a token ... +./input.at:1905: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none -./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -39. input.at:1805: testing %start after first rule ... -./input.at:1817: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +41. input.at:1895: ok ./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y -Werror -./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +stderr: +./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +input.y:16.13-19: error: useless %printer for type [-Werror=other] +input.y:17.16-22: error: useless %destructor for type [-Werror=other] +./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none ./input.at:1735: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret too-large.y -37. input.at:1708: ok -39. input.at:1805: ok +43. input.at:1936: testing Reject unused %code qualifiers ... 
+./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:1946: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c.y +./input.at:1085: sed 's,.*/$,,' stderr 1>&2 -stderr: -input.y:1.1-5: error: POSIX Yacc does not support %code [-Werror=yacc] -input.y:9.8-16: error: POSIX Yacc forbids dashes in symbol names: WITH-DASH [-Werror=yacc] -input.y:10.21-34: error: POSIX Yacc does not support string literals [-Werror=yacc] -input.y:12.23-38: error: POSIX Yacc does not support string literals [-Werror=yacc] -input.y:13.1-5: error: POSIX Yacc does not support %code [-Werror=yacc] -input.y:20.8-16: error: POSIX Yacc forbids dashes in symbol names: with-dash [-Werror=yacc] -input.y:22.15-28: error: POSIX Yacc does not support string literals [-Werror=yacc] -input.y:24.17-32: error: POSIX Yacc does not support string literals [-Werror=yacc] +./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +37. input.at:1708: ok +./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none +44. input.at:2025: testing Multiple %code ... +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:2054: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y -Wnone,none -Werror --trace=none -40. input.at:1826: testing Duplicate %start symbol ... 
-./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -./input.at:1666: sed 's,.*/$,,' stderr 1>&2 +./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y --warnings=none -Werror --trace=none +45. input.at:2065: testing errors ... +./input.at:2077: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-redefined.y stderr: -41. input.at:1895: testing %prec takes a token ... -./input.at:1905: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -41. input.at:1895: ok stdout: - -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -42. input.at:1916: testing %prec's token must be defined ... -./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:1556: $CC $CFLAGS $CPPFLAGS -c -o main.o main.c -25. input.at:1139: ok - -43. input.at:1936: testing Reject unused %code qualifiers ... -./input.at:1946: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c.y +./input.at:391: $PREPARSER ./input +stderr: stderr: input.y:1.12-14: error: duplicate directive [-Werror=other] 1 | %start exp exp exp @@ -3556,46 +3328,97 @@ 1 | %start exp exp exp | ^~~ input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] -./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none +./input.at:2091: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-unused.y +./input.at:391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./input.at:1836: sed 's,.*/$,,' stderr 1>&2 -./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y --warnings=error -./input.at:1960: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c-glr.y -./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y +10. input.at:341: ok +stderr: +./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +input.y:1.1-5: error: POSIX Yacc does not support %code [-Werror=yacc] +input.y:9.8-16: error: POSIX Yacc forbids dashes in symbol names: WITH-DASH [-Werror=yacc] +input.y:10.21-34: error: POSIX Yacc does not support string literals [-Werror=yacc] +input.y:12.23-38: error: POSIX Yacc does not support string literals [-Werror=yacc] +input.y:13.1-5: error: POSIX Yacc does not support %code [-Werror=yacc] +input.y:20.8-16: error: POSIX Yacc forbids dashes in symbol names: with-dash [-Werror=yacc] +input.y:22.15-28: error: POSIX Yacc does not support string literals [-Werror=yacc] +input.y:24.17-32: error: POSIX Yacc does not support string literals [-Werror=yacc] +./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none +6. 
input.at:173: ok ./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Werror +./input.at:1666: sed 's,.*/$,,' stderr 1>&2 + +./input.at:1960: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c-glr.y stderr: input.y:2.8-17: error: token for %prec is not defined: PREC [-Werror=other] -stderr: -stdout: -./input.at:1557: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.o main.o $LIBS -./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y -Wnone,none -Werror --trace=none -./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +18. input.at:832: ok +./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y --warnings=error ./input.at:1925: sed 's,.*/$,,' stderr 1>&2 + +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +46. input.at:2102: testing %define, --define, --force-define ... +./input.at:2054: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./input.at:2118: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dvar-dd=cmd-d1 -Dvar-dd=cmd-d2 \ + -Fvar-ff=cmd-f1 -Fvar-ff=cmd-f2 \ + -Dvar-dfg=cmd-d -Fvar-dfg=cmd-f \ + -Fvar-fd=cmd-f -Dvar-fd=cmd-d \ + --skeleton ./skel.c input.y + +47. input.at:2170: testing "%define" Boolean variables ... 
+./input.at:2180: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret Input.y +./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y --warnings=none -Werror --trace=none +48. input.at:2191: testing "%define" code variables ... +./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.yy +17. input.at:794: ok +11. input.at:401: ok ./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -stderr: +./input.at:2123: cat input.tab.c + +45. input.at:2065: ok +26. input.at:1187: ok +8. input.at:238: ok + +./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +49. input.at:2224: testing "%define" keyword variables ... +./input.at:2135: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dvar=cmd-d input-dg.y +./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y + + +50. input.at:2257: testing "%define" enum variables ... ./input.at:1973: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c++.y -stdout: -./input.at:1558: $PREPARSER ./input -./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +7. 
input.at:204: ok +./input.at:2269: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:2146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Dvar=cmd-d input-dg.y + +51. input.at:2320: testing "%define" file variables ... +./input.at:2329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +52. input.at:2342: testing "%define" backward compatibility ... +./input.at:2355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:2284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y stderr: -./input.at:1558: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./input.at:2158: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dunused-d -Funused-f input-unused.y +stdout: +./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y -Wnone,none -Werror --trace=none +53. input.at:2393: testing Unused api.pure ... +./input.at:2413: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:1556: $CC $CFLAGS $CPPFLAGS -c -o main.o main.c +47. input.at:2170: ok + +52. input.at:2342: ok ./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -30. 
input.at:1400: ok -./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y --warnings=none -Werror --trace=none -./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Werror -stderr: -input.y:2.16-18: error: useless %printer for type <*> [-Werror=other] -./input.at:1124: sed 's,.*/$,,' stderr 1>&2 -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:2303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +27. input.at:1219: 23. input.at:1045: ok + ok +54. input.at:2429: testing C++ namespace reference errors ... + +./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Werror +./input.at:2450: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:1986: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c++-glr.y -./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -44. input.at:2025: testing Multiple %code ... 
-./input.at:2054: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./input.at:1678: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: + input.y:12.10-32: error: unset value: $$ [-Werror=other] 12 | a: INT | INT { } INT { } INT { }; | ^~~~~~~~~~~~~~~~~~~~~~~ @@ -3611,12 +3434,6 @@ input.y:13.10-15: error: empty rule for typed nonterminal, and no action [-Werror=other] 13 | b: INT | %empty; | ^~~~~~ -input.y:14.14-20: error: unset value: $$ [-Werror=midrule-values] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~~~~~ -input.y:14.26-41: error: unset value: $$ [-Werror=midrule-values] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~~~~~~~~~~~~~~ input.y:14.10-62: error: unset value: $$ [-Werror=other] 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3626,9 +3443,6 @@ input.y:14.43-45: error: unused value: $5 [-Werror=other] 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; | ^~~ -input.y:15.14-16: error: unset value: $$ [-Werror=midrule-values] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~ input.y:15.10-49: error: unset value: $$ [-Werror=other] 15 | d: INT | INT { } INT { $1; } INT { $2; }; | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3653,15 +3467,9 @@ input.y:18.10-12: error: unused value: $1 [-Werror=other] 18 | g: INT | INT { $$; } INT { $$; } INT { }; | ^~~ -input.y:18.14-29: error: unused value: $2 [-Werror=midrule-values] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~~~~~~~~~~~~~~ input.y:18.31-33: error: unused value: $3 [-Werror=other] 18 | g: INT | INT { $$; } INT { $$; } INT { }; | ^~~ -input.y:18.35-50: error: unused value: $4 [-Werror=midrule-values] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~~~~~~~~~~~~~~ input.y:18.52-54: error: unused value: $5 [-Werror=other] 18 | g: INT | INT { $$; } INT { $$; } INT { }; | ^~~ @@ -3674,15 +3482,9 @@ input.y:19.31-33: error: unused value: $3 [-Werror=other] 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; | ^~~ -input.y:19.35-64: error: unused value: $4 [-Werror=midrule-values] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ input.y:19.66-68: error: unused value: $5 [-Werror=other] 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; | ^~~ -input.y:21.18-37: error: unused value: $3 [-Werror=midrule-values] - 21 | j: INT | INT INT { $$ = 1; } { $$ = $1 + $2; }; - | ^~~~~~~~~~~~~~~~~~~~ input.y:22.10-68: error: unset value: $$ [-Werror=other] 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3692,9 +3494,6 @@ input.y:22.14-16: error: unused value: $2 [-Werror=other] 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; | ^~~ -input.y:22.35-64: error: unused value: $4 [-Werror=midrule-values] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ input.y:25.23-25: error: unset value: $$ [-Werror=other] 25 | n: INT | INT { } INT { } INT { }; | ^~~ @@ -3725,14 +3524,16 @@ input.y:26.40-42: error: unset value: $$ [-Werror=other] 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; | ^~~ -./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none 
-Werror --trace=none -./input.at:776: sed 's,.*/$,,' stderr 1>&2 -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=error -./input.at:1999: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret special-char-@@.y -./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./input.at:2012: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret special-char-].y +./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +55. input.at:2482: testing Bad character literals ... +./input.at:2484: +set x `LC_ALL=C ls -l 'empty.y'` && + size=$6 && + { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='empty.y'; } || exit 77 +56. input.at:2543: testing Bad escapes in literals ... 
+./input.at:2556: "$PERL" -e 'print "start: \"\\\t\\\f\\\0\\\1\" ;";' >> input.y || exit 77 +./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:785: sed 's,.*/$,,' stderr 1>&2 stderr: input.y:12.10-32: error: unset value: $$ [-Werror=other] 12 | a: INT | INT { } INT { } INT { }; @@ -3749,12 +3550,6 @@ input.y:13.10-15: error: empty rule for typed nonterminal, and no action [-Werror=other] 13 | b: INT | %empty; | ^~~~~~ -input.y:14.14-20: error: unset value: $$ [-Werror=midrule-values] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~~~~~ -input.y:14.26-41: error: unset value: $$ [-Werror=midrule-values] - 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; - | ^~~~~~~~~~~~~~~~ input.y:14.10-62: error: unset value: $$ [-Werror=other] 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3764,9 +3559,6 @@ input.y:14.43-45: error: unused value: $5 [-Werror=other] 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; | ^~~ -input.y:15.14-16: error: unset value: $$ [-Werror=midrule-values] - 15 | d: INT | INT { } INT { $1; } INT { $2; }; - | ^~~ input.y:15.10-49: error: unset value: $$ [-Werror=other] 15 | d: INT | INT { } INT { $1; } INT { $2; }; | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3791,15 +3583,9 @@ input.y:18.10-12: error: unused value: $1 [-Werror=other] 18 | g: INT | INT { $$; } INT { $$; } INT { }; | ^~~ -input.y:18.14-29: error: unused value: $2 [-Werror=midrule-values] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~~~~~~~~~~~~~~ input.y:18.31-33: error: unused value: $3 [-Werror=other] 18 | g: INT | INT { $$; } INT { $$; } INT { }; | ^~~ -input.y:18.35-50: error: unused value: $4 [-Werror=midrule-values] - 18 | g: INT | INT { $$; } INT { $$; } INT { }; - | ^~~~~~~~~~~~~~~~ input.y:18.52-54: error: unused value: $5 [-Werror=other] 18 | g: INT | INT { $$; } INT { $$; } INT { }; | ^~~ @@ -3812,15 +3598,9 @@ input.y:19.31-33: error: unused value: $3 [-Werror=other] 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; | ^~~ -input.y:19.35-64: error: unused value: $4 [-Werror=midrule-values] - 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ input.y:19.66-68: error: unused value: $5 [-Werror=other] 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; | ^~~ -input.y:21.18-37: error: unused value: $3 [-Werror=midrule-values] - 21 | j: INT | INT INT { $$ = 1; } { $$ = $1 + $2; }; - | ^~~~~~~~~~~~~~~~~~~~ input.y:22.10-68: error: unset value: $$ [-Werror=other] 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3830,9 +3610,6 @@ input.y:22.14-16: error: unused value: $2 [-Werror=other] 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; | ^~~ -input.y:22.35-64: error: unused value: $4 [-Werror=midrule-values] - 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ input.y:25.23-25: error: unset value: $$ [-Werror=other] 25 | n: INT | INT { } INT { } INT { }; | ^~~ @@ -3863,286 +3640,243 @@ input.y:26.40-42: error: unset value: $$ [-Werror=other] 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; | ^~~ -./input.at:1681: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./input.at:2054: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./input.at:786: sed 
's,.*/$,,' stderr 1>&2 -42. input.at:1916: ok - -./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none -45. input.at:2065: testing errors ... -./input.at:2077: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-redefined.y -./input.at:2091: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-unused.y -stderr: -input.y:1.16-18: error: duplicate directive [-Werror=other] - 1 | %start exp foo exp - | ^~~ -input.y:1.8-10: note: previous declaration - 1 | %start exp foo exp - | ^~~ -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -43. input.at:1936: ok - -./input.at:1859: sed 's,.*/$,,' stderr 1>&2 -46. input.at:2102: testing %define, --define, --force-define ... -./input.at:2118: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dvar-dd=cmd-d1 -Dvar-dd=cmd-d2 \ - -Fvar-ff=cmd-f1 -Fvar-ff=cmd-f2 \ - -Dvar-dfg=cmd-d -Fvar-dfg=cmd-f \ - -Fvar-fd=cmd-f -Dvar-fd=cmd-d \ - --skeleton ./skel.c input.y -./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=error -./input.at:2123: cat input.tab.c -24. input.at:1074: ok -stderr: -stdout: -./input.at:2135: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dvar=cmd-d input-dg.y - -./input.at:1694: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -45. input.at:2065: ok -36. input.at:1642: ok -47. input.at:2170: testing "%define" Boolean variables ... -./input.at:2180: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret Input.y - -./input.at:2146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Dvar=cmd-d input-dg.y - -49. input.at:2224: testing "%define" keyword variables ... 
-./input.at:2158: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dunused-d -Funused-f input-unused.y -./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -48. input.at:2191: testing "%define" code variables ... -./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.yy -./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -stderr: -stdout: -./input.at:2055: $PREPARSER ./input -stderr: -./input.at:2055: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Werror -44. input.at:2025: ./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror - ok -47. input.at:2170: ok -46. input.at:2102: ok - - - -50. input.at:2257: testing "%define" enum variables ... -./input.at:2269: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -51. input.at:2320: testing "%define" file variables ... -./input.at:2329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -52. input.at:2342: testing "%define" backward compatibility ... -./input.at:2355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -52. 
input.at:2342: ok -./input.at:2284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -stderr: -input.y:5.1-40: error: %define variable 'lr.type' requires keyword values [-Werror=deprecated] -input.y:3.1-40: error: %define variable 'lr.default-reduction' requires keyword values [-Werror=deprecated] -input.y:4.1-40: error: %define variable 'lr.keep-unreachable-state' requires keyword values [-Werror=deprecated] -input.y:1.1-38: error: %define variable 'api.pure' requires keyword values [-Werror=deprecated] -input.y:2.1-40: error: %define variable 'api.push-pull' requires keyword values [-Werror=deprecated] - -stderr: -input.yy:2.1-30: error: %define variable 'api.location.type' requires '{...}' values [-Werror=deprecated] -input.yy:4.1-30: error: %define variable 'api.prefix' requires '{...}' values [-Werror=deprecated] -input.yy:5.1-30: error: %define variable 'api.token.prefix' requires '{...}' values [-Werror=deprecated] -input.yy:3.1-30: error: %define variable 'api.namespace' requires '{...}' values [-Werror=deprecated] -53. input.at:2393: testing Unused api.pure ... -./input.at:2413: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none -./input.at:2213: sed 's,.*/$,,' stderr 1>&2 -./input.at:2246: sed 's,.*/$,,' stderr 1>&2 -51. input.at:2320: ok - -./input.at:2303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -54. input.at:2429: testing C++ namespace reference errors ... 
-./input.at:2450: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=error -./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none -./input.at:2414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none -./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -50. input.at:2257: ok -./input.at:2452: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:2558: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +57. input.at:2582: testing Unexpected end of file ... +./input.at:2586: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -55. input.at:2482: testing Bad character literals ... 
-./input.at:2484: -set x `LC_ALL=C ls -l 'empty.y'` && - size=$6 && - { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='empty.y'; } || exit 77 stderr: 0+0 records in 0+0 records out -0 bytes copied, 7.5885e-05 s, 0.0 kB/s +0 bytes copied, 5.4799e-05 s, 0.0 kB/s +./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror stdout: +./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error ./input.at:2490: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret empty.y -./input.at:2454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2508: +./input.at:775: sed 's,.*/$,,' stderr 1>&2 +./input.at:2591: +set x `LC_ALL=C ls -l 'char.y'` && + size=$6 && + { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='char.y'; } || exit 77 +./input.at:1986: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c++-glr.y +56. input.at:2543: ok +./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +51. input.at:2320: ./input.at:2508: set x `LC_ALL=C ls -l 'two.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='two.y'; } || exit 77 -./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -stderr: -input.y:2.8-10: error: duplicate directive [-Werror=other] - 2 | %start exp - | ^~~ -input.y:1.8-10: note: previous declaration - 1 | %start exp foo - | ^~~ -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./input.at:2415: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=none -Werror --trace=none + ok +58. input.at:2675: testing LAC: Errors for %define ... 
stderr: +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=none input.y 0+0 records in 0+0 records out -0 bytes copied, 5.6853e-05 s, 0.0 kB/s +0 bytes copied, 5.5875e-05 s, 0.0 kB/s stdout: -./input.at:1877: sed 's,.*/$,,' stderr 1>&2 -./input.at:2514: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret two.y -./input.at:2522: -set x `LC_ALL=C ls -l 'three.y'` && - size=$6 && - { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='three.y'; } || exit 77 -./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:2594: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret char.y + +./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:2414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +46. input.at:2102: ok stderr: -0+0 records in -0+0 records out -0 bytes copied, 5.6921e-05 s, 0.0 kB/s stdout: -./input.at:2528: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret three.y -./input.at:2416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -15. input.at:774: ok -55. input.at:2482: ok -./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none - - -./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.yy -./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -56. input.at:2543: testing Bad escapes in literals ... 
-./input.at:2556: "$PERL" -e 'print "start: \"\\\t\\\f\\\0\\\1\" ;";' >> input.y || exit 77 -./input.at:2558: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -57. input.at:2582: testing Unexpected end of file ... -./input.at:2586: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./input.at:2591: -set x `LC_ALL=C ls -l 'char.y'` && - size=$6 && - { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='char.y'; } || exit 77 stderr: + 0+0 records in 0+0 records out -0 bytes copied, 6.5194e-05 s, 0.0 kB/s +0 bytes copied, 5.7772e-05 s, 0.0 kB/s stdout: -./input.at:2594: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret char.y -56. input.at:2543: ok - -./input.at:2458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y ./input.at:2604: set x `LC_ALL=C ls -l 'escape-in-char.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='escape-in-char.y'; } || exit 77 -58. input.at:2675: testing LAC: Errors for %define ... -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=none input.y +./input.at:1557: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.o main.o $LIBS +./input.at:2514: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret two.y +./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y --warnings=none -Werror --trace=none + +./input.at:2452: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +59. input.at:2719: testing -Werror combinations ... stderr: +60. input.at:2764: testing %name-prefix and api.prefix are incompatible ... 
+./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y +./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y 0+0 records in 0+0 records out -0 bytes copied, 5.3496e-05 s, 0.0 kB/s -./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +0 bytes copied, 5.4353e-05 s, 0.0 kB/s +./input.at:2779: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-deprecated input.y stdout: ./input.at:2607: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret escape-in-char.y -./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=full input.y +50. input.at:2257: ok +./input.at:2522: +set x `LC_ALL=C ls -l 'three.y'` && + size=$6 && + { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='three.y'; } || exit 77 +61. input.at:2793: testing Redefined %union name ... 
+./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +stderr: +input.yy:2.1-30: error: %define variable 'api.location.type' requires '{...}' values [-Werror=deprecated] +input.yy:4.1-30: error: %define variable 'api.prefix' requires '{...}' values [-Werror=deprecated] +input.yy:5.1-30: error: %define variable 'api.token.prefix' requires '{...}' values [-Werror=deprecated] +input.yy:3.1-30: error: %define variable 'api.namespace' requires '{...}' values [-Werror=deprecated] +stderr: +0+0 records in +0+0 records out +0 bytes copied, 5.0412e-05 s, 0.0 kB/s +stdout: ./input.at:2617: set x `LC_ALL=C ls -l 'string.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='string.y'; } || exit 77 -./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Werror -./input.at:2460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2213: sed 's,.*/$,,' stderr 1>&2 +./input.at:2528: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret three.y stderr: 0+0 records in 0+0 records out -0 bytes copied, 5.9373e-05 s, 0.0 kB/s +0 bytes copied, 5.0136e-05 s, 0.0 kB/s stdout: -./input.at:2620: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret string.y +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=full input.y stderr: +./input.at:2620: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret string.y + input.y:5.1-40: error: %define variable 'lr.type' requires keyword values [-Werror=deprecated] input.y:3.1-40: error: %define variable 'lr.default-reduction' requires keyword values [-Werror=deprecated] input.y:4.1-40: error: %define variable 'lr.keep-unreachable-state' requires keyword values [-Werror=deprecated] input.y:1.1-38: error: %define variable 'api.pure' requires keyword values [-Werror=deprecated] input.y:2.1-40: error: %define variable 'api.push-pull' requires keyword values [-Werror=deprecated] -./input.at:2247: sed 's,.*/$,,' stderr 1>&2 -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=unsupported input.y -16. input.at:784: ok +55. 
input.at:2482: ok +stderr: +stdout: +./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=error +./input.at:1558: $PREPARSER ./input ./input.at:2630: set x `LC_ALL=C ls -l 'escape-in-string.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='escape-in-string.y'; } || exit 77 -40. input.at:1826: ok -./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:2246: sed 's,.*/$,,' stderr 1>&2 +stderr: +42. input.at:1916: stdout: +stderr: + ok +stderr: +62. input.at:2840: testing Stray $ or @ ... +input.y:6.23-28: error: unused value: $4 [-Werror=other] +input.y:8.9-11: error: unset value: $$ [-Werror=other] +./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y +./input.at:2055: $PREPARSER ./input +./input.at:1999: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret special-char-@@.y +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y stderr: -input.yy:2.1-32: error: %define variable 'api.location.type' requires '{...}' values [-Werror=deprecated] -input.yy:4.1-32: error: %define variable 'api.prefix' requires '{...}' values [-Werror=deprecated] -input.yy:5.1-32: error: %define variable 'api.token.prefix' requires '{...}' values [-Werror=deprecated] -input.yy:3.1-32: error: %define variable 'api.namespace' requires '{...}' values [-Werror=deprecated] +./input.at:1558: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:2415: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2055: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 0+0 records in 0+0 records out -0 bytes copied, 5.8418e-05 s, 0.0 kB/s - +0 bytes copied, 5.0516e-05 s, 0.0 kB/s stdout: +./input.at:1175: sed 's,.*/$,,' stderr 1>&2 +44. input.at:2025: ok ./input.at:2633: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret escape-in-string.y -./input.at:2214: sed 's,.*/$,,' stderr 1>&2 +30. 
input.at:1400: ok + +./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Werror +./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error ./input.at:2643: set x `LC_ALL=C ls -l 'tstring.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='tstring.y'; } || exit 77 -./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=error -60. input.at:2764: testing %name-prefix and api.prefix are incompatible ... -./input.at:2779: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-deprecated input.y -59. input.at:2719: testing -Werror combinations ... -./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y + +./input.at:2454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2780: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dapi.prefix={foo} -p bar -Wno-deprecated input.y + stderr: 0+0 records in 0+0 records out -0 bytes copied, 5.8075e-05 s, 0.0 kB/s +0 bytes copied, 5.1281e-05 s, 0.0 kB/s stdout: +64. input.at:2946: testing Deprecated directives ... +./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror ./input.at:2646: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret tstring.y -./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./input.at:3019: cp errors-all experr +63. input.at:2883: testing Code injection ... +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y +65. input.at:3077: testing Unput's effect on locations ... 
+./input.at:1678: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./input.at:3092: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:3020: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -ffixit input.y +66. input.at:3113: testing Non-deprecated directives ... +./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y ./input.at:2656: set x `LC_ALL=C ls -l 'escape-in-tstring.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='escape-in-tstring.y'; } || exit 77 -53. input.at:2393: ok -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=none input.y +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=unsupported input.y +./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +65. input.at:3077: ok stderr: 0+0 records in 0+0 records out -0 bytes copied, 6.4458e-05 s, 0.0 kB/s +0 bytes copied, 4.9902e-05 s, 0.0 kB/s stdout: ./input.at:2659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret escape-in-tstring.y -57. input.at:2582: ok -./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none -61. input.at:2793: testing Redefined %union name ... - -./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full input.y -./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Werror -62. input.at:2840: testing Stray $ or @ ... 
-./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y -./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Werror -./input.at:2780: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dapi.prefix={foo} -p bar -Wno-deprecated input.y -./input.at:2462: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y stderr: +57. input.at:2582: ./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none + ok input.y:2.15: error: stray '$' [-Werror=other] +./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Werror +./input.at:2012: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret special-char-].y +./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +67. input.at:3148: testing Cannot type action ... +./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +./input.at:2727: sed 's,.*/$,,' stderr 1>&2 +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +stderr: + +input.y:3.8-10: error: %define variable 'api.value.union.name' redefined [-Werror=other] +input.y:1.8-10: note: previous definition +input.y:4.1-32: error: %define variable 'api.value.union.name' redefined [-Werror=other] +input.y:3.8-10: note: previous definition +input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] +./input.at:2456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=error +./input.at:2808: sed 's,.*/$,,' stderr 1>&2 ./input.at:2781: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dapi.prefix={foo} -Wno-deprecated input.y +68. input.at:3171: testing Character literals and api.token.raw ... +./input.at:2416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:1681: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./input.at:3181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.c -d input.y +./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:3022: sed -e '/^fix-it:/d' errors-all >experr +./input.at:3023: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +68. input.at:3171: ok +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=none input.y +stderr: +input.y:1.16-18: error: duplicate directive [-Werror=other] + 1 | %start exp foo exp + | ^~~ +input.y:1.8-10: note: previous declaration + 1 | %start exp foo exp + | ^~~ +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] + +./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:1859: sed 's,.*/$,,' stderr 1>&2 +stderr: +input.y:3.13-14: error: useless %printer for type <> [-Werror=other] +69. input.at:3205: testing %token-table and parse.error ... 
+./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:3220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +43. input.at:1936: ./input.at:1116: sed 's,.*/$,,' stderr 1>&2 + ok +./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none stderr: input.y:11.19: error: stray '$' [-Werror=other] input.y:11.23: error: stray '@' [-Werror=other] @@ -4153,499 +3887,529 @@ input.y:16.19: error: stray '$' [-Werror=other] input.y:16.23: error: stray '@' [-Werror=other] input.y:17.19: error: stray '$' [-Werror=other] -stderr: -./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=none -Werror --trace=none -input.y:3.8-10: error: %define variable 'api.value.union.name' redefined [-Werror=other] -input.y:1.8-10: note: previous definition -input.y:4.1-32: error: %define variable 'api.value.union.name' redefined [-Werror=other] -input.y:3.8-10: note: previous definition -input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] -./input.at:2465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2727: sed 's,.*/$,,' stderr 1>&2 -./input.at:2808: sed 's,.*/$,,' stderr 1>&2 +./input.at:2458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=none -Werror --trace=none +./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Wnone,none -Werror --trace=none ./input.at:2861: sed 's,.*/$,,' stderr 1>&2 -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=unsupported input.y -./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -./input.at:2467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y + ./input.at:2782: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -p bar -Wno-deprecated input.y +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +stderr: ./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=error -./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=error -49. input.at:2224: ok -48. input.at:2191: ok - -./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Wnone,none -Werror --trace=none -./input.at:2469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -63. input.at:2883: testing Code injection ... 
-./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y +input.y:14.1-15.5: error: duplicate directive: '%file-prefix' [-Werror=other] +input.y:13.1-18: note: previous declaration +input.y: error: %expect-rr applies only to GLR parsers [-Werror=other] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] ./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=none input.y -60. input.at:2764: ok - -./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none - -54. input.at:2429: ok -64. input.at:2946: testing Deprecated directives ... -./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Wnone,none -Werror --trace=none -./input.at:3019: cp errors-all experr -./input.at:3020: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -ffixit input.y -./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=full input.y -65. input.at:3077: testing Unput's effect on locations ... -./input.at:3092: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -65. input.at:3077: ok -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.c -d input.y - - -66. input.at:3113: testing Non-deprecated directives ... -67. input.at:3148: testing Cannot type action ... 
-./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:2820: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -W input.y -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=unsupported input.y -./input.at:3022: sed -e '/^fix-it:/d' errors-all >experr -./input.at:3023: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=none input.y -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y -./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./input.at:2825: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=full input.y -./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y -Werror -stderr: -input.y:10.6-13: error: only midrule actions can be typed: int [-Werror=other] - 10 | exp: {} - | ^~~~~~~~ -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=unsupported input.y -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y -./input.at:3156: sed 's,.*/$,,' stderr 1>&2 +./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export 
VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full input.y ./input.at:3027: rm -f output.c +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y +70. input.at:3231: testing Invalid file prefix mapping arguments ... +./input.at:3133: sed 's,.*/$,,' stderr 1>&2 ./input.at:3028: cp input.y input.y.orig -./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -61. input.at:2793: ok +./input.at:3246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo input.y ./input.at:3029: sed -e '/fix-it/d' experr -62. input.at:2840: ok +./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error ./input.at:3030: echo "bison: file 'input.y' was updated (backup: 'input.y~')" >>experr +stderr: ./input.at:3031: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --update input.y +input.y:10.6-13: error: only midrule actions can be typed: int [-Werror=other] + 10 | exp: {} + | ^~~~~~~~ +./input.at:3247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --file-prefix-map foo input.y +./input.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y stderr: - -input.y:14.1-15.5: error: duplicate directive: '%file-prefix' [-Werror=other] -input.y:13.1-18: note: previous declaration -input.y: error: %expect-rr applies only to GLR parsers [-Werror=other] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] - ./input.at:3034: diff input.y.orig input.y~ +./input.at:3156: sed 's,.*/$,,' stderr 1>&2 +stdout: ./input.at:3037: test ! -f output.c ./input.at:3040: sed -e '1,8d' input.y -./input.at:3133: sed 's,.*/$,,' stderr 1>&2 -stderr: -./input.at:3062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -input.y:2.15: error: stray '$' [-Werror=other] -./input.at:2730: sed 's,.*/$,,' stderr 1>&2 -68. input.at:3171: testing Character literals and api.token.raw ... -69. input.at:3205: testing %token-table and parse.error ... 
-./input.at:3181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:3220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -./input.at:2697: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y -./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y --warnings=error -./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -68. input.at:3171: ok -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y -./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none - -64. 
input.at:2946: ok -./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y -Wnone,none -Werror --trace=none +./input.at:1694: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y +./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:3248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo=bar -M baz input.y ./input.at:3221: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y -70. input.at:3231: testing Invalid file prefix mapping arguments ... -./input.at:3246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo input.y - -./input.at:3247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --file-prefix-map foo input.y -71. named-refs.at:22: testing Tutorial calculator ... -67. input.at:3148: ./input.at:3248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo=bar -M baz input.y +./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:3062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +36. 
input.at:1642: ./input.at:2460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y ok -./named-refs.at:184: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none ./input.at:3249: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo= -M baz input.y -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.d input.y -./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y --warnings=none -Werror --trace=none -69. input.at:3205: ok -./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none - +25. input.at:1139: ok +60. input.at:2764: ok +./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=unsupported input.y +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none 70. input.at:3231: ok -./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y + +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y + +./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.yy +./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Wnone,none -Werror --trace=none +71. named-refs.at:22: testing Tutorial calculator ... 72. 
named-refs.at:196: testing Undefined and ambiguous references ... +./named-refs.at:184: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y ./named-refs.at:254: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o test.c test.y -72. named-refs.at:196: ok 73. named-refs.at:297: testing Misleading references ... ./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.java input.y +./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./input.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y 74. named-refs.at:316: testing Many kinds of errors ... ./named-refs.at:384: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -66. input.at:3113: ok - -58. input.at:2675: ok +72. named-refs.at:196: ok +./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none ./named-refs.at:426: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o test.c test.y -75. named-refs.at:551: testing Missing identifiers in brackets ... -./named-refs.at:559: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +69. input.at:3205: ok +64. 
input.at:2946: ok +./input.at:2462: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-none input.y +./input.at:2820: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -W input.y -75. named-refs.at:551: ok -76. named-refs.at:567: testing Redundant words in brackets ... +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none + +75. named-refs.at:551: testing Missing identifiers in brackets ... 74. named-refs.at:316: ok +./named-refs.at:559: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +76. named-refs.at:567: testing Redundant words in brackets ... +./named-refs.at:575: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=none input.y +75. named-refs.at:551: ok 77. named-refs.at:583: testing Comments in brackets ... ./named-refs.at:591: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -./named-refs.at:575: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -./named-refs.at:184: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y -77. named-refs.at:583: ok - +76. named-refs.at:567: ok ./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -Werror -79. named-refs.at:618: testing Redundant words in LHS brackets ... +53. input.at:2393: ok 78. named-refs.at:599: testing Stray symbols in brackets ... 
-./named-refs.at:625: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none +77. named-refs.at:583: ok ./named-refs.at:607: "$PERL" -pi -e 's/\\(\d{3})/chr(oct($1))/ge' test.y || exit 77 -80. named-refs.at:635: testing Factored LHS ... -./named-refs.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -76. named-refs.at:567: ok -79. named-refs.at:618: ok +./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none ./named-refs.at:608: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Werror +./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y -./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y -Werror -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.c -d input.y + +79. named-refs.at:618: testing Redundant words in LHS brackets ... +./named-refs.at:625: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +./input.at:2825: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y 78. 
named-refs.at:599: ok +./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y -Werror +./input.at:2465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +79. named-refs.at:618: ./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none + ok +80. named-refs.at:635: testing Factored LHS ... +./named-refs.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./named-refs.at:184: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=full input.y +81. named-refs.at:648: testing Unresolved references ... +./named-refs.at:676: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +82. named-refs.at:715: testing $ or @ followed by . or - ... +./named-refs.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y +./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none + +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret test.y + stderr: test.y:11.22-29: error: misleading reference: '$foo.bar' [-Werror=other] test.y:11.8-10: note: refers to: $foo at $1 test.y:11.12-18: note: possibly meant: $[foo.bar] at $2 -81. named-refs.at:648: testing Unresolved references ... -./named-refs.at:676: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +84. output.at:74: testing Output files: -dv >&- ... +./output.at:74: case "$PREBISON" in *valgrind*) exit 77;; esac +./output.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv >&- foo.y ./named-refs.at:306: sed 's,.*/$,,' stderr 1>&2 - 81. named-refs.at:648: ok -80. named-refs.at:635: ./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y --warnings=error - ok - -82. named-refs.at:715: testing $ or @ followed by . or - ... 
-./named-refs.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y 83. output.at:68: testing Output files: -dv ... -stderr: -./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret test.y +./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y --warnings=error +67. input.at:3148: ok +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=unsupported input.y ./output.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv foo.y +stderr: input.y:2.15: error: stray '$' [-Werror=other] +stderr: +input.y:5.1-40: error: %define variable 'lr.type' requires keyword values [-Werror=deprecated] +input.y:3.1-40: error: %define variable 'lr.default-reduction' requires keyword values [-Werror=deprecated] +input.y:4.1-40: error: %define variable 'lr.keep-unreachable-state' requires keyword values [-Werror=deprecated] +input.y:1.1-38: error: %define variable 'api.pure' requires keyword values [-Werror=deprecated] +input.y:2.1-40: error: %define variable 'api.push-pull' requires keyword values [-Werror=deprecated] -84. output.at:74: testing Output files: -dv >&- ... -./output.at:74: case "$PREBISON" in *valgrind*) exit 77;; esac -./output.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv >&- foo.y -./input.at:2733: sed 's,.*/$,,' stderr 1>&2 +66. input.at:3113: ok +./input.at:2247: sed 's,.*/$,,' stderr 1>&2 +./input.at:2730: sed 's,.*/$,,' stderr 1>&2 +stderr: + +62. input.at:2840: ok +61. input.at:2793: ok +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +input.yy:2.1-32: error: %define variable 'api.location.type' requires '{...}' values [-Werror=deprecated] +input.yy:4.1-32: error: %define variable 'api.prefix' requires '{...}' values [-Werror=deprecated] +input.yy:5.1-32: error: %define variable 'api.token.prefix' requires '{...}' values [-Werror=deprecated] +input.yy:3.1-32: error: %define variable 'api.namespace' requires '{...}' values [-Werror=deprecated] +./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y --warnings=error +80. named-refs.at:635: ok +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.d input.y + +./input.at:2214: sed 's,.*/$,,' stderr 1>&2 85. 
output.at:81: testing Output files: -dv -o foo.c ... ./output.at:81: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -o foo.c foo.y -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y -./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y --warnings=error -./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -Wnone,none -Werror --trace=none -stderr: +86. output.at:84: testing Output files: -dv -y ... +./output.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -y foo.y +./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=error ./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y -Werror -./output.at:74: find . -type f | + +87. output.at:87: testing Output files: api.header.include={"./foo.h"} -dv -y ... +./output.at:87: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -y foo.y + +./input.at:2467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +88. output.at:92: testing Output files: -dv -o foo.tab.c ... +stderr: + +./output.at:68: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +90. output.at:97: testing Output files: -Hfoo.header -v -gfoo.gv --html=foo.html ... +90. output.at:97: ./output.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -o foo.tab.c foo.y + skipped (output.at:97) +./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y -Wnone,none -Werror --trace=none stderr: -./output.at:68: find . -type f | +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.java input.y +./output.at:74: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +89. output.at:95: testing Output files: --fixed-output-files -dv -g --html ... +89. output.at:95: stderr: +83. 
output.at:68: ./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=none input.y + ok +./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -Wnone,none -Werror --trace=none + skipped (output.at:95) +input.y:2.8-10: error: duplicate directive [-Werror=other] + 2 | %start exp + | ^~~ +input.y:1.8-10: note: previous declaration + 1 | %start exp foo + | ^~~ +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] 84. output.at:74: ok -./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y -Wnone,none -Werror --trace=none -83. output.at:68: ok -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y + + stderr: -./output.at:81: find . -type f | +stderr: +./input.at:1877: sed 's,.*/$,,' stderr 1>&2 +foo.y:1.1-7: warning: POSIX Yacc does not support %define [-Wyacc] +./output.at:87: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - -./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y --warnings=none -Werror --trace=none -stderr: -test.y:4.9: error: stray '$' [-Werror=other] -test.y:5.9: error: stray '@' [-Werror=other] -./output.at:81: grep '#include "foo.h"' foo.c - -./named-refs.at:740: sed 's,.*/$,,' stderr 1>&2 -86. output.at:84: testing Output files: -dv -y ... -./output.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -y foo.y -./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y --warnings=error -stdout: -#include "foo.h" -87. output.at:87: testing Output files: api.header.include={"./foo.h"} -dv -y ... -./output.at:87: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -y foo.y -85. output.at:81: ok -./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y --warnings=none -Werror --trace=none -73. named-refs.at:297: ok -stderr: ./output.at:84: find . 
-type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none stderr: -stdout: - -./named-refs.at:185: $PREPARSER ./test input.txt -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y -stderr: - -./named-refs.at:185: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y -Wnone,none -Werror --trace=none -./output.at:84: grep '#include "y.tab.h"' y.tab.c -stderr: -stdout: -86. output.at:84: ok -89. output.at:95: testing Output files: --fixed-output-files -dv -g --html ... -71. named-refs.at:22: ok -89. output.at:95: 88. output.at:92: testing Output files: -dv -o foo.tab.c ... - skipped (output.at:95) -./output.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -o foo.tab.c foo.y - +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +92. output.at:102: testing Output files: -dv -g --xml -y ... stderr: -foo.y:1.1-7: warning: POSIX Yacc does not support %define [-Wyacc] -./output.at:87: find . -type f | +./output.at:87: grep '#include "./foo.h"' y.tab.c +./output.at:81: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +input.y:2.16-18: error: useless %printer for type <*> [-Werror=other] 91. output.at:100: testing Output files: -dv -g --xml --fixed-output-files ... -./output.at:100: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml --fixed-output-files foo.y -90. output.at:97: testing Output files: -Hfoo.header -v -gfoo.gv --html=foo.html ... -./input.at:2738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror,none,other input.y -./output.at:87: grep '#include "./foo.h"' y.tab.c -90. output.at:97: skipped (output.at:97) +./output.at:84: grep '#include "y.tab.h"' y.tab.c stdout: #include "./foo.h" - -87. output.at:87: ok -92. output.at:102: testing Output files: -dv -g --xml -y ... -./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y --warnings=none -Werror --trace=none ./output.at:102: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml -y foo.y stderr: - -93. output.at:104: testing Output files: %require "3.4" -dv -g --xml -y ... 
-./output.at:104: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml -y foo.y -./output.at:92: find . -type f | +87. output.at:87: ./output.at:92: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 + ok +stdout: +stderr: 94. output.at:107: testing Output files: -dv -g --xml -o y.tab.c ... -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.d input.y +./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none +test.y:4.9: error: stray '$' [-Werror=other] +test.y:5.9: error: stray '@' [-Werror=other] +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=full input.y +86. output.at:84: ok +./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y --warnings=none -Werror --trace=none ./output.at:107: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml -o y.tab.c foo.y +./output.at:100: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml --fixed-output-files foo.y 88. output.at:92: ok -./input.at:2741: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror,no-all,other input.y +./input.at:2469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./named-refs.at:740: sed 's,.*/$,,' stderr 1>&2 +./output.at:81: grep '#include "foo.h"' foo.c +93. output.at:104: testing Output files: %require "3.4" -dv -g --xml -y ... +./input.at:1124: sed 's,.*/$,,' stderr 1>&2 +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y +stdout: + +#include "foo.h" +./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y --warnings=error +./output.at:104: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml -y foo.y + +85. 
output.at:81: ok +./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y --warnings=none -Werror --trace=none + +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y +97. output.at:116: testing Output files: %header %verbose ... +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +96. output.at:112: testing Output files: -dv -g -o foo.c ... +95. output.at:110: testing Output files: -dv -b bar ... +./output.at:110: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -b bar foo.y +./output.at:116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y +./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=unsupported input.y stderr: -:6: warning: deprecated option: '--fixed-output-files', use '-o y.tab.c' [-Wdeprecated] -./output.at:100: find . -type f | +./output.at:102: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -82. named-refs.at:715: ok -91. output.at:100: ok +./output.at:112: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g -o foo.c foo.y +98. output.at:118: testing Output files: %header %verbose %yacc ... stderr: -./output.at:102: find . -type f | +stderr: +:6: warning: deprecated option: '--fixed-output-files', use '-o y.tab.c' [-Wdeprecated] +./output.at:100: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -stderr: -foo.y:1.1-8: warning: POSIX Yacc does not support %require [-Wyacc] -foo.y:1.10-14: warning: POSIX Yacc does not support string literals [-Wyacc] -./output.at:104: find . 
-type f | +./output.at:118: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y +./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-none input.y +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.c -d input.y +./output.at:107: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - - -95. output.at:110: testing Output files: -dv -b bar ... -./output.at:110: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -b bar foo.y -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.java input.y +73. named-refs.at:297: ok +92. output.at:102: ok +./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y -Wnone,none -Werror --trace=none +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +91. output.at:100: ok stderr: -./output.at:107: find . -type f | +foo.y:1.1-8: warning: POSIX Yacc does not support %require [-Wyacc] +foo.y:1.10-14: warning: POSIX Yacc does not support string literals [-Wyacc] +./output.at:104: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -92. output.at:102: ok -97. output.at:116: testing Output files: %header %verbose ... -96. output.at:112: testing Output files: -dv -g -o foo.c ... -./output.at:116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y -94. output.at:107: ok -93. output.at:104: ok -./input.at:2746: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror -Wno-error=other input.y -./output.at:112: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g -o foo.c foo.y - - -98. output.at:118: testing Output files: %header %verbose %yacc ... -./output.at:118: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y -100. output.at:125: testing Output files: %file-prefix "bar" %header %verbose ... -./output.at:125: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y +54. input.at:2429: ok stderr: ./output.at:110: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -63. input.at:2883: ok -stderr: + +./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=none -Werror --trace=none +93. output.at:104: stderr: + ok +99. 
output.at:121: testing Output files: %header %verbose %yacc ... ./output.at:116: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 95. output.at:110: ok -99. output.at:121: testing Output files: %header %verbose %yacc ... ./output.at:121: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy - -./input.at:2750: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-error=other -Werror input.y -97. output.at:116: ok stderr: ./output.at:112: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 + +./input.at:2697: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none + +96. output.at:112: ok +100. output.at:125: testing Output files: %file-prefix "bar" %header %verbose ... +./output.at:125: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y +97. output.at:116: 94. output.at:107: + ok + ok + +./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y -Werror +101. output.at:127: testing Output files: %output "bar.c" %header %verbose %yacc ... +./output.at:127: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y stderr: -./output.at:125: find . -type f | +103. output.at:136: testing Output files: %header %verbose ... +./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y --warnings=none -Werror --trace=none +./output.at:118: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -101. output.at:127: testing Output files: %output "bar.c" %header %verbose %yacc ... -./output.at:127: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y +./output.at:136: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y +stderr: +104. output.at:139: testing Output files: %header %verbose -o foo.c ... +stdout: -100. output.at:125: ok -96. output.at:112: ok +./named-refs.at:185: $PREPARSER ./test input.txt 102. output.at:129: testing Output files: %file-prefix "baz" %output "bar.c" %header %verbose %yacc ... 
-./output.at:129: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y -103. output.at:136: testing Output files: %header %verbose ... -./output.at:136: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +./output.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c foo.yy stderr: -./output.at:118: find . -type f | +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +98. output.at:118: ok +./named-refs.at:185: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +105. output.at:142: testing Output files: --header=foo.hpp -o foo.c++ ... +106. output.at:146: testing Output files: --header=foo.hpp -o foo.c++ ... +49. input.at:2224: ok +stderr: +./output.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header=foo.hpp -o foo.c++ foo.yy +./output.at:129: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y +./output.at:125: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +./output.at:142: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header=foo.hpp -o foo.c++ foo.yy stderr: +107. output.at:150: testing Output files: %header "foo.hpp" -o foo.c++ ... +stderr: +./output.at:150: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy +71. named-refs.at:22: ok +./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y ./output.at:121: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -104. output.at:139: testing Output files: %header %verbose -o foo.c ... -98. output.at:118: ok -./input.at:2754: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror=other -Wno-other input.y +40. input.at:1826: ok +108. output.at:154: testing Output files: -o foo.c++ --graph=foo.gph ... stderr: -./output.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c foo.yy -99. output.at:121: ok -105. output.at:142: testing Output files: --header=foo.hpp -o foo.c++ ... -./output.at:142: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header=foo.hpp -o foo.c++ foo.yy -./output.at:127: find . -type f | - "$PERL" -ne ' - s,\./,,; chomp; - push @file, $_ unless m{^(foo.y|testsuite.log)$}; - END { print join (" ", sort @file), "\n" }' || exit 77 +100. output.at:125: ok +48. 
input.at:2191: ok +input.y:2.15: error: stray '$' [-Werror=other] +./output.at:154: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ --graph=foo.gph foo.yy stderr: -101. output.at:127: ok -./output.at:129: find . -type f | +./output.at:136: find . -type f | "$PERL" -ne ' s,\./,,; chomp; - push @file, $_ unless m{^(foo.y|testsuite.log)$}; + push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -106. output.at:146: testing Output files: --header=foo.hpp -o foo.c++ ... -stderr: -./output.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header=foo.hpp -o foo.c++ foo.yy -./output.at:136: find . -type f | +./input.at:2733: sed 's,.*/$,,' stderr 1>&2 +82. named-refs.at:715: ok +stderr: +109. output.at:160: testing Output files: %type useless --header --graph --xml --report=all -Wall -Werror ... +./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y --warnings=error +./output.at:127: find . -type f | "$PERL" -ne ' s,\./,,; chomp; - push @file, $_ unless m{^(foo.yy|testsuite.log)$}; + push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -102. output.at:129: ok -107. output.at:150: testing Output files: %header "foo.hpp" -o foo.c++ ... +99. output.at:121: ./output.at:160: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y + ok 103. output.at:136: ok -108. output.at:154: testing Output files: -o foo.c++ --graph=foo.gph ... -./output.at:150: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy + +110. output.at:167: testing Output files: useless=--header --graph --xml --report=all -Wall -Werror ... +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y + +111. output.at:173: testing Output files: %defines -o foo.c++ ... +./output.at:167: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y stderr: -./output.at:142: find . -type f | +stderr: +./output.at:139: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:154: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ --graph=foo.gph foo.yy -59. input.at:2719: ok -stderr: - -./output.at:139: find . -type f | +./output.at:146: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -109. output.at:160: testing Output files: %type useless --header --graph --xml --report=all -Wall -Werror ... 
-./output.at:160: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y -105. output.at:142: ok - -110. output.at:167: testing Output files: useless=--header --graph --xml --report=all -Wall -Werror ... -./output.at:167: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y -104. output.at:139: ok -111. output.at:173: testing Output files: %defines -o foo.c++ ... -./output.at:173: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy - stderr: -./output.at:146: find . -type f | +101. output.at:127: ok + +24. input.at:1074: ok +./output.at:142: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +stderr: +./output.at:173: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy +113. output.at:191: testing Output files: lalr1.cc ... +112. output.at:176: testing Output files: %defines "foo.hpp" -o foo.c++ ... +./output.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy 106. output.at:146: ok -112. output.at:176: testing Output files: %defines "foo.hpp" -o foo.c++ ... -stderr: ./output.at:150: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -113. output.at:191: testing Output files: lalr1.cc ... - ./output.at:176: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy +114. output.at:194: testing Output files: lalr1.cc %verbose ... +104. output.at:139: ok + +./output.at:194: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy stderr: -./output.at:154: find . -type f | +107. output.at:150: ok +115. output.at:197: testing Output files: lalr1.cc %header %verbose ... +./output.at:129: find . -type f | "$PERL" -ne ' s,\./,,; chomp; - push @file, $_ unless m{^(foo.yy|testsuite.log)$}; + push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 + +./output.at:197: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +116. output.at:200: testing Output files: lalr1.cc %verbose %locations ... +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Werror + +./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y +105. output.at:142: ok + +102. 
output.at:129: ok +./output.at:200: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +118. output.at:206: testing Output files: lalr1.cc %header %verbose ... + +stderr: +117. output.at:203: testing Output files: lalr1.cc %header %verbose %locations ... +./output.at:203: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy stderr: -./output.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy foo.y:1.13-19: error: symbol 'useless' is used, but is not defined as a token and has no rules [-Werror=other] foo.y: error: 1 nonterminal useless in grammar [-Werror=other] foo.y:1.13-19: error: nonterminal useless in grammar: useless [-Werror=other] @@ -4654,386 +4418,553 @@ s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -107. output.at:150: ok -108. output.at:154: ok -109. output.at:160: ok -114. output.at:194: testing Output files: lalr1.cc %verbose ... -./output.at:194: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy - - +./output.at:154: find . -type f | + "$PERL" -ne ' + s,\./,,; chomp; + push @file, $_ unless m{^(foo.yy|testsuite.log)$}; + END { print join (" ", sort @file), "\n" }' || exit 77 -117. output.at:203: testing Output files: lalr1.cc %header %verbose %locations ... +./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y -Wnone,none -Werror --trace=none +119. output.at:210: testing Output files: lalr1.cc %header %verbose %locations -o subdir/foo.cc ... +./output.at:206: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret subdir/foo.yy +./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Werror +121. output.at:220: testing Output files: lalr1.cc %header %locations %verbose %file-prefix "output_dir/foo" ... stderr: -115. output.at:197: testing Output files: lalr1.cc %header %verbose ... -./output.at:197: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -./output.at:203: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -116. output.at:200: testing Output files: lalr1.cc %verbose %locations ... + +120. output.at:215: testing Output files: lalr1.cc %header %verbose %file-prefix "output_dir/foo" ... ./output.at:173: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 stderr: -./output.at:200: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy foo.y:1.1-15: error: %define variable 'useless' is not used -./output.at:167: find . -type f | +108. output.at:154: ./output.at:167: find . 
-type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 + ok stderr: -./output.at:176: find . -type f | +122. output.at:226: testing Output files: lalr1.cc %header %locations api.location.file=none %require "3.2" ... +./output.at:191: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -110. output.at:167: ok -111. output.at:173: ok -stderr: stderr: -./output.at:191: find . -type f | +111. output.at:173: 109. output.at:160: ok + ok +123. output.at:231: testing Output files: lalr1.cc %header %locations api.location.file="foo.loc.hh" %require "3.2" ... +./output.at:226: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +./output.at:176: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +./output.at:231: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +110. output.at:167: ok +stderr: +113. output.at:191: ok + ./output.at:194: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 112. output.at:176: ok - -113. output.at:191: 114. output.at:194: ok - ok - - - stderr: +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y ./output.at:197: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -119. output.at:210: testing Output files: lalr1.cc %header %verbose %locations -o subdir/foo.cc ... -120. output.at:215: testing Output files: lalr1.cc %header %verbose %file-prefix "output_dir/foo" ... -118. output.at:206: testing Output files: lalr1.cc %header %verbose ... -./output.at:206: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret subdir/foo.yy -stderr: + + stderr: ./output.at:200: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -121. output.at:220: testing Output files: lalr1.cc %header %locations %verbose %file-prefix "output_dir/foo" ... + +./output.at:215: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret gram_dir/foo.yy +58. input.at:2675: 124. output.at:237: testing Output files: lalr1.cc %header %locations api.location.file="$at_dir/foo.loc.hh" %require "3.2" ... + ok + 115. output.at:197: ok +114. output.at:194: stderr: + ok +126. output.at:272: testing Conflicting output files: %header "foo.output" -v ... +125. output.at:267: testing Conflicting output files: --graph="foo.tab.c" ... +./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y +stderr: +./output.at:237: "$PERL" -pi -e 's{\$at_dir}'"{$at_group_dir}g" foo.yy || exit 77 ./output.at:203: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -122. 
output.at:226: testing Output files: lalr1.cc %header %locations api.location.file=none %require "3.2" ... -./output.at:226: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -116. output.at:200: ok -117. output.at:203: ok - - - -123. output.at:231: testing Output files: lalr1.cc %header %locations api.location.file="foo.loc.hh" %require "3.2" ... -./output.at:231: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -124. output.at:237: testing Output files: lalr1.cc %header %locations api.location.file="$at_dir/foo.loc.hh" %require "3.2" ... -125. output.at:267: testing Conflicting output files: --graph="foo.tab.c" ... -./output.at:237: "$PERL" -pi -e 's{\$at_dir}'"{$at_group_dir}g" foo.yy || exit 77 -./output.at:215: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret gram_dir/foo.yy -./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y -./output.at:237: rm -f foo.yy.bak -stderr: -./output.at:237: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -./output.at:210: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o subdir/foo.cc subdir/foo.yy +116. output.at:200: 127. output.at:277: testing Conflicting output files: lalr1.cc %header %locations --graph="location.hh" ... + ok ./output.at:206: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(subdir/foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -stderr: +./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v foo.y +128. output.at:282: testing Conflicting output files: -o foo.y ... +./output.at:237: rm -f foo.yy.bak + +./output.at:237: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y --warnings=none -Werror --trace=none +./output.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o foo.y foo.y + +129. output.at:328: testing Output file name: `~!@#$%^&*()-=_+{}[]|\:;<>, .' ... +./output.at:328: touch "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.tmp" || exit 77 +117. output.at:203: ok +./output.at:206: grep 'include .subdir/' foo.tab.cc ./output.at:220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret gram_dir/foo.yy +./output.at:210: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o subdir/foo.cc subdir/foo.yy +./output.at:206: grep 'include .subdir/' foo.tab.hh + +./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --graph="location.hh" foo.y +118. 
output.at:206: ok + +stderr: +./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.h" glr.y + ./output.at:226: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:206: grep 'include .subdir/' foo.tab.cc -./output.at:206: grep 'include .subdir/' foo.tab.hh -118. output.at:206: ok -122. output.at:226: ok stderr: -./output.at:237: find . -type f | +./output.at:231: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +131. output.at:336: testing Output file name: ) ... +./output.at:336: touch ").tmp" || exit 77 +122. output.at:226: + ok +132. output.at:337: testing Output file name: # ... +./output.at:337: touch "#.tmp" || exit 77 +123. output.at:231: ok +130. output.at:335: testing Output file name: ( ... +./output.at:335: touch "(.tmp" || exit 77 +./output.at:337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "#.c" --header="#.h" glr.y +134. output.at:339: testing Output file name: @{ ... +./output.at:339: touch "@{.tmp" || exit 77 stderr: -stderr: +./output.at:336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o ").c" --header=").h" glr.y +133. output.at:338: testing Output file name: @@ ... +./output.at:338: touch "@@.tmp" || exit 77 ./output.at:215: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(gram_dir/foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +./output.at:339: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@{.c" --header="@{.h" glr.y +./output.at:335: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "(.c" --header="(.h" glr.y +./output.at:338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@@.c" --header="@@.h" glr.y +135. output.at:340: testing Output file name: @} ... +./output.at:340: touch "@}.tmp" || exit 77 +./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y -Werror -./output.at:231: find . -type f | +120. output.at:215: ok +./output.at:282: cat foo.y +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.d input.y +./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y -Werror +128. output.at:282: stderr: + ok +./output.at:340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@}.c" --header="@}.h" glr.y +stderr: +./output.at:237: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -124. 
output.at:237: ok -./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y -Werror stderr: -120. output.at:215: ok -./output.at:210: find . -type f | + +./output.at:220: find . -type f | "$PERL" -ne ' s,\./,,; chomp; - push @file, $_ unless m{^(subdir/foo.yy|testsuite.log)$}; + push @file, $_ unless m{^(gram_dir/foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -123. output.at:231: ok -126. output.at:272: testing Conflicting output files: %header "foo.output" -v ... -./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v foo.y -stderr: -./output.at:220: find . -type f | +136. output.at:341: testing Output file name: [ ... +./output.at:341: touch "[.tmp" || exit 77 +./output.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "[.c" --header="[.h" glr.y +137. output.at:342: testing Output file name: ] ... +./output.at:342: touch "].tmp" || exit 77 +./output.at:210: find . -type f | "$PERL" -ne ' s,\./,,; chomp; - push @file, $_ unless m{^(gram_dir/foo.yy|testsuite.log)$}; + push @file, $_ unless m{^(subdir/foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:210: grep 'include .subdir/' subdir/foo.cc -127. output.at:277: testing Conflicting output files: lalr1.cc %header %locations --graph="location.hh" ... -./output.at:210: grep 'include .subdir/' subdir/foo.hh - -./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --graph="location.hh" foo.y - - -119. output.at:210: ok -121. output.at:220: ok -128. output.at:282: testing Conflicting output files: -o foo.y ... -129. output.at:328: testing Output file name: `~!@#$%^&*()-=_+{}[]|\:;<>, .' ... -./output.at:328: touch "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.tmp" || exit 77 -130. output.at:335: testing Output file name: ( ... -./output.at:335: touch "(.tmp" || exit 77 -./output.at:335: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "(.c" --header="(.h" glr.y -./output.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o foo.y foo.y - -stderr: - -foo.y: error: conflicting outputs to file 'foo.tab.c' [-Werror=other] -./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y -Werror -./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.h" glr.y -132. output.at:337: testing Output file name: # ... -./output.at:337: touch "#.tmp" || exit 77 -./output.at:267: sed 's,.*/$,,' stderr 1>&2 -131. output.at:336: testing Output file name: ) ... 
-./output.at:336: touch ").tmp" || exit 77 -./output.at:337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "#.c" --header="#.h" glr.y -./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y --warnings=error -./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y -Werror -./output.at:336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o ").c" --header=").h" glr.y -./output.at:335: ls "(.c" "(.h" -stdout: -(.c -(.h -./output.at:282: cat foo.y +./input.at:2738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror,none,other input.y ./output.at:328: ls "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.h" +121. output.at:220: + ok +./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "].c" --header="].h" glr.y +138. output.at:363: testing Graph with no conflicts ... +./output.at:210: grep 'include .subdir/' subdir/foo.cc +./output.at:363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y stdout: -128. output.at:282: ok -./output.at:335: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "(.c" `~!@#$%^&*()-=_+{}[]|\:;<>, .'.c `~!@#$%^&*()-=_+{}[]|\:;<>, .'.h -stderr: -foo.y: error: conflicting outputs to file 'foo.output' [-Werror=other] -./output.at:328: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" +./output.at:210: grep 'include .subdir/' subdir/foo.hh +./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y -Werror +119. output.at:210: ./output.at:328: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" + ok -./output.at:272: sed 's,.*/$,,' stderr 1>&2 -stderr: +139. output.at:403: testing Graph with unsolved S/R ... ./output.at:337: ls "#.c" "#.h" -foo.y: error: conflicting outputs to file 'location.hh' [-Werror=other] -./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y --warnings=error +stderr: +124. output.at:237: ok stdout: +./output.at:336: ls ").c" ").h" +foo.y: error: conflicting outputs to file 'foo.tab.c' [-Werror=other] #.c #.h -./output.at:336: ls ").c" ").h" +./output.at:335: ls "(.c" "(.h" +stdout: ./output.at:337: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "#.c" + +./output.at:338: ls "@@.c" "@@.h" stdout: +140. output.at:473: testing Graph with solved S/R ... +./output.at:339: ls "@{.c" "@{.h" ).c ).h -133. output.at:338: testing Output file name: @@ ... 
-./output.at:338: touch "@@.tmp" || exit 77 -./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y -Wnone,none -Werror --trace=none ./output.at:336: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c ").c" -./output.at:277: sed 's,.*/$,,' stderr 1>&2 -./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=error -./output.at:338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@@.c" --header="@@.h" glr.y -./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y -Wnone,none -Werror --trace=none -./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y --warnings=none -Werror --trace=none -./output.at:338: ls "@@.c" "@@.h" +./output.at:267: sed 's,.*/$,,' stderr 1>&2 stdout: +./output.at:473: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y @@.c @@.h -./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y -Wnone,none -Werror --trace=none +(.c +(.h +./output.at:335: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "(.c" +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.java input.y +./output.at:403: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y ./output.at:338: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@@.c" -./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y --warnings=none -Werror --trace=none -./output.at:267: cat foo.y -125. output.at:267: ok -./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=none -Werror --trace=none - -./output.at:272: cat foo.y -126. output.at:272: ok -134. output.at:339: testing Output file name: @{ ... -./output.at:339: touch "@{.tmp" || exit 77 -./output.at:339: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@{.c" --header="@{.h" glr.y - -./output.at:277: cat foo.y -127. output.at:277: ok -135. output.at:340: testing Output file name: @} ... 
-./output.at:340: touch "@}.tmp" || exit 77 -./output.at:340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@}.c" --header="@}.h" glr.y -./output.at:339: ls "@{.c" "@{.h" stdout: -@{.c -@{.h -./output.at:339: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@{.c" +stderr: -136. output.at:341: testing Output file name: [ ... -./output.at:341: touch "[.tmp" || exit 77 +foo.y: error: conflicting outputs to file 'foo.output' [-Werror=other] +./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y --warnings=error ./output.at:340: ls "@}.c" "@}.h" -./output.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "[.c" --header="[.h" glr.y +141. output.at:538: testing Graph with R/R ... +@{.c +@{.h stdout: +./output.at:538: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y @}.c @}.h -./output.at:340: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@}.c" +./output.at:272: sed 's,.*/$,,' stderr 1>&2 +./output.at:339: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@{.c" ./output.at:341: ls "[.c" "[.h" +./output.at:340: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@}.c" stdout: [.c [.h -./output.at:341: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "[.c" -stderr: -stdout: -./output.at:335: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "(.cc" --header="(.hh" cxx.y -./output.at:335: ls "(.cc" "(.hh" -stdout: -(.cc -(.hh -./output.at:335: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "(.cc" -stderr: -stdout: -./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh" cxx.y -stderr: -stdout: -./output.at:337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "#.cc" --header="#.hh" cxx.y -stderr: -stdout: -./output.at:328: ls "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh" -./output.at:336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o ").cc" --header=").hh" cxx.y -stdout: -./output.at:337: ls "#.cc" "#.hh" -`~!@#$%^&*()-=_+{}[]|\:;<>, .'.cc -`~!@#$%^&*()-=_+{}[]|\:;<>, .'.hh -stdout: -#.cc -#.hh -./output.at:328: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" -stderr: -./output.at:337: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "#.cc" -stdout: -stderr: -stdout: -./output.at:338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@@.cc" --header="@@.hh" cxx.y -./output.at:339: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@{.cc" --header="@{.hh" cxx.y -./output.at:336: ls ").cc" ").hh" -stdout: -).cc -).hh -./output.at:336: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c ").cc" -./output.at:339: ls "@{.cc" "@{.hh" -./output.at:338: ls "@@.cc" "@@.hh" -stdout: -@@.cc -@@.hh -stdout: -@{.cc -@{.hh -./output.at:338: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@@.cc" -./output.at:339: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@{.cc" stderr: +./output.at:341: $CC 
$CFLAGS $CPPFLAGS -c -o glr.o -c "[.c" +input.y:12.10-32: error: unset value: $$ [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~ +input.y:12.10-12: error: unused value: $1 [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~ +input.y:12.18-20: error: unused value: $3 [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~ +input.y:12.26-28: error: unused value: $5 [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~ +input.y:13.10-15: error: empty rule for typed nonterminal, and no action [-Werror=other] + 13 | b: INT | %empty; + | ^~~~~~ +input.y:14.14-20: error: unset value: $$ [-Werror=midrule-values] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~~~~~ +input.y:14.26-41: error: unset value: $$ [-Werror=midrule-values] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~~~~~~~~~~~~~~ +input.y:14.10-62: error: unset value: $$ [-Werror=other] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:14.22-24: error: unused value: $3 [-Werror=other] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~ +input.y:14.43-45: error: unused value: $5 [-Werror=other] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~ +input.y:15.14-16: error: unset value: $$ [-Werror=midrule-values] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~ +input.y:15.10-49: error: unset value: $$ [-Werror=other] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:15.18-20: error: unused value: $3 [-Werror=other] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~ +input.y:15.30-32: error: unused value: $5 [-Werror=other] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~ +input.y:16.10-37: error: unset value: $$ [-Werror=other] + 16 | e: INT | INT { } INT { } INT { $1; }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:16.18-20: error: unused value: $3 [-Werror=other] + 16 | e: INT | INT { } INT { } INT { $1; }; + | ^~~ +input.y:16.27-29: error: unused value: $5 [-Werror=other] + 16 | e: INT | INT { } INT { } INT { $1; }; + | ^~~ +input.y:18.10-58: error: unset value: $$ [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:18.10-12: error: unused value: $1 [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~ +input.y:18.14-29: error: unused value: $2 [-Werror=midrule-values] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~~~~~~~~~~~~~~ +input.y:18.31-33: error: unused value: $3 [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~ +input.y:18.35-50: error: unused value: $4 [-Werror=midrule-values] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~~~~~~~~~~~~~~ +input.y:18.52-54: error: unused value: $5 [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~ +input.y:19.10-72: error: unset value: $$ [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:19.10-12: error: unused value: $1 [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~ +input.y:19.31-33: error: unused value: $3 [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~ +input.y:19.35-64: error: unused value: $4 [-Werror=midrule-values] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:19.66-68: error: 
unused value: $5 [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~ +input.y:21.18-37: error: unused value: $3 [-Werror=midrule-values] + 21 | j: INT | INT INT { $$ = 1; } { $$ = $1 + $2; }; + | ^~~~~~~~~~~~~~~~~~~~ +input.y:22.10-68: error: unset value: $$ [-Werror=other] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:22.10-12: error: unused value: $1 [-Werror=other] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~ +input.y:22.14-16: error: unused value: $2 [-Werror=other] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~ +input.y:22.35-64: error: unused value: $4 [-Werror=midrule-values] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:25.23-25: error: unset value: $$ [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.40-42: error: unset value: $$ [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.10-50: error: unset value: $$ [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:25.10-12: error: unused value: $1 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.23-25: error: unused value: $2 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.27-29: error: unused value: $3 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.40-42: error: unused value: $4 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.44-46: error: unused value: $5 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:26.23-25: error: unset value: $$ [-Werror=other] + 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; + | ^~~ +input.y:26.40-42: error: unset value: $$ [-Werror=other] + 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; + | ^~~ +142. output.at:576: testing Graph with reductions with multiple LAT ... +./output.at:576: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y +./output.at:342: ls "].c" "].h" +./input.at:2741: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror,no-all,other input.y stdout: -./output.at:340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@}.cc" --header="@}.hh" cxx.y -./output.at:340: ls "@}.cc" "@}.hh" -stdout: -@}.cc -@}.hh -./output.at:340: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@}.cc" +./input.at:776: sed 's,.*/$,,' stderr 1>&2 stderr: -stdout: -./output.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "[.cc" --header="[.hh" cxx.y -./output.at:341: ls "[.cc" "[.hh" -stdout: -[.cc -[.hh -./output.at:341: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "[.cc" stderr: -stdout: -130. output.at:335: ok - -137. output.at:342: testing Output file name: ] ... 
-./output.at:342: touch "].tmp" || exit 77 -./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "].c" --header="].h" glr.y -./output.at:342: ls "].c" "].h" -stdout: ].c ].h +input.y:12.10-32: error: unset value: $$ [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~ +input.y:12.10-12: error: unused value: $1 [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~ +input.y:12.18-20: error: unused value: $3 [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~ +input.y:12.26-28: error: unused value: $5 [-Werror=other] + 12 | a: INT | INT { } INT { } INT { }; + | ^~~ +input.y:13.10-15: error: empty rule for typed nonterminal, and no action [-Werror=other] + 13 | b: INT | %empty; + | ^~~~~~ +input.y:14.14-20: error: unset value: $$ [-Werror=midrule-values] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~~~~~ +input.y:14.26-41: error: unset value: $$ [-Werror=midrule-values] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~~~~~~~~~~~~~~ +input.y:14.10-62: error: unset value: $$ [-Werror=other] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:14.22-24: error: unused value: $3 [-Werror=other] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~ +input.y:14.43-45: error: unused value: $5 [-Werror=other] + 14 | c: INT | INT { $1; } INT { $2; } INT { $4; }; + | ^~~ +input.y:15.14-16: error: unset value: $$ [-Werror=midrule-values] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~ +input.y:15.10-49: error: unset value: $$ [-Werror=other] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:15.18-20: error: unused value: $3 [-Werror=other] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~ +input.y:15.30-32: error: unused value: $5 [-Werror=other] + 15 | d: INT | INT { } INT { $1; } INT { $2; }; + | ^~~ +input.y:16.10-37: error: unset value: $$ [-Werror=other] + 16 | e: INT | INT { } INT { } INT { $1; }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:16.18-20: error: unused value: $3 [-Werror=other] + 16 | e: INT | INT { } INT { } INT { $1; }; + | ^~~ +input.y:16.27-29: error: unused value: $5 [-Werror=other] + 16 | e: INT | INT { } INT { } INT { $1; }; + | ^~~ +input.y:18.10-58: error: unset value: $$ [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:18.10-12: error: unused value: $1 [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~ +input.y:18.14-29: error: unused value: $2 [-Werror=midrule-values] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~~~~~~~~~~~~~~ +input.y:18.31-33: error: unused value: $3 [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~ +input.y:18.35-50: error: unused value: $4 [-Werror=midrule-values] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~~~~~~~~~~~~~~ +input.y:18.52-54: error: unused value: $5 [-Werror=other] + 18 | g: INT | INT { $$; } INT { $$; } INT { }; + | ^~~ +input.y:19.10-72: error: unset value: $$ [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:19.10-12: error: unused value: $1 [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~ +input.y:19.31-33: error: unused value: $3 [-Werror=other] + 19 | h: INT | INT { $$; } INT { 
$$ = $2; } INT { }; + | ^~~ +input.y:19.35-64: error: unused value: $4 [-Werror=midrule-values] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:19.66-68: error: unused value: $5 [-Werror=other] + 19 | h: INT | INT { $$; } INT { $$ = $2; } INT { }; + | ^~~ +input.y:21.18-37: error: unused value: $3 [-Werror=midrule-values] + 21 | j: INT | INT INT { $$ = 1; } { $$ = $1 + $2; }; + | ^~~~~~~~~~~~~~~~~~~~ +input.y:22.10-68: error: unset value: $$ [-Werror=other] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:22.10-12: error: unused value: $1 [-Werror=other] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~ +input.y:22.14-16: error: unused value: $2 [-Werror=other] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~ +input.y:22.35-64: error: unused value: $4 [-Werror=midrule-values] + 22 | k: INT | INT INT { $$; } { $$ = $3; } { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:25.23-25: error: unset value: $$ [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.40-42: error: unset value: $$ [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.10-50: error: unset value: $$ [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:25.10-12: error: unused value: $1 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.23-25: error: unused value: $2 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.27-29: error: unused value: $3 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.40-42: error: unused value: $4 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:25.44-46: error: unused value: $5 [-Werror=other] + 25 | n: INT | INT { } INT { } INT { }; + | ^~~ +input.y:26.23-25: error: unset value: $$ [-Werror=other] + 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; + | ^~~ +input.y:26.40-42: error: unset value: $$ [-Werror=other] + 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; + | ^~~ +foo.y: error: conflicting outputs to file 'location.hh' [-Werror=other] ./output.at:342: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "].c" +./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y --warnings=error stderr: -stdout: -133. output.at:338: ok - -stderr: -stdout: -138. output.at:363: testing Graph with no conflicts ... -./output.at:363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y -stderr: -134. output.at:339: ok -stdout: -132. output.at:337: ok -stderr: -stdout: -129. output.at:328: ok - -stderr: - -stdout: -stderr: -131. output.at:336: ok ./output.at:363: grep -v // input.gv -139. output.at:403: testing Graph with unsolved S/R ... 
-./output.at:403: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y - +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=error +./output.at:277: sed 's,.*/$,,' stderr 1>&2 +./input.at:786: sed 's,.*/$,,' stderr 1>&2 138. output.at:363: ok -140. output.at:473: testing Graph with solved S/R ... -./output.at:473: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y - -141. output.at:538: testing Graph with R/R ... -./output.at:538: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y - stderr: +63. input.at:2883: ok +./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=error input.y: warning: 3 shift/reduce conflicts [-Wconflicts-sr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples input.y:10.10-18: warning: rule useless in parser due to conflicts [-Wother] input.y:11.10-18: warning: rule useless in parser due to conflicts [-Wother] input.y:12.10-18: warning: rule useless in parser due to conflicts [-Wother] +./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y -Wnone,none -Werror --trace=none ./output.at:403: grep -v // input.gv -139. output.at:403: 142. output.at:576: testing Graph with reductions with multiple LAT ... - ok -./output.at:576: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y -143. output.at:641: testing Graph with a reduction rule both enabled and disabled ... -./output.at:641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y +./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=error +139. output.at:403: ok + stderr: input.y:6.5-7: warning: rule useless in parser due to conflicts [-Wother] input.y:14.10-18: warning: rule useless in parser due to conflicts [-Wother] @@ -5041,10 +4972,16 @@ ./output.at:473: grep -v // input.gv stderr: +140. output.at:473: ok + +./input.at:2746: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror -Wno-error=other input.y input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples input.y:4.3: warning: rule useless in parser due to conflicts [-Wother] +143. output.at:641: testing Graph with a reduction rule both enabled and disabled ... 
+./output.at:641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y ./output.at:538: grep -v // input.gv +./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y -Wnone,none -Werror --trace=none 144. output.at:744: testing C++ Output File Prefix Mapping ... stderr: input.y: warning: 3 reduce/reduce conflicts [-Wconflicts-rr] @@ -5053,28 +4990,24 @@ input.y:5.3: warning: rule useless in parser due to conflicts [-Wother] ./output.at:576: grep -v // input.gv ./output.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o out/x1.cc -M out/=bar/ x1.yy -140. output.at:473: ok -stderr: -stdout: +145. diagnostics.at:84: testing Warnings ... -135. output.at:340: ok +./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y --warnings=none -Werror --trace=none +./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y -Wnone,none -Werror --trace=none 141. output.at:538: ok 142. output.at:576: ok -145. diagnostics.at:84: testing Warnings ... -./output.at:775: $CXX $CPPFLAGS $CXXFLAGS -Iout/include -c -o out/x1.o out/x1.cc - - - ./diagnostics.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y 146. diagnostics.at:133: testing Single point locations ... + +./diagnostics.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y + +./input.at:2750: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-error=other -Werror input.y 147. diagnostics.at:182: testing Line is too short, and then you die ... -stderr: +./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y --warnings=none -Werror --trace=none +./output.at:775: $CXX $CPPFLAGS $CXXFLAGS -Iout/include -c -o out/x1.o out/x1.cc +./output.at:267: cat foo.y 148. diagnostics.at:217: testing Zero-width characters ... 
-./diagnostics.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -stdout: -./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "].cc" --header="].hh" cxx.y -./diagnostics.at:217: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77 +./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=none -Werror --trace=none ./diagnostics.at:84: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) @@ -5083,7 +5016,9 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +125. output.at:267: ok +./diagnostics.at:217: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77 +./diagnostics.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y ./diagnostics.at:217: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y ./diagnostics.at:182: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; @@ -5093,8 +5028,8 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 + ./diagnostics.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -stderr: ./diagnostics.at:217: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) @@ -5103,9 +5038,8 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -input.y: warning: 4 shift/reduce conflicts [-Wconflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./output.at:641: grep -v // input.gv +./diagnostics.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +./diagnostics.at:217: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y ./diagnostics.at:133: "$PERL" -pi -e ' s{()}{ $1 eq "" ? 
$1 : "" }ge; if (/Example/) @@ -5114,33 +5048,35 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./output.at:342: ls "].cc" "].hh" -./diagnostics.at:217: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -stdout: -143. output.at:641: ok -].cc -].hh 147. diagnostics.at:182: ok -./output.at:342: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "].cc" +./output.at:272: cat foo.y +./diagnostics.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +149. diagnostics.at:235: testing Tabulations and multibyte characters ... 148. diagnostics.at:217: ok -145. diagnostics.at:84: ok +126. output.at:272: ok +./input.at:2754: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror=other -Wno-other input.y +./diagnostics.at:235: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./output.at:277: cat foo.y +150. diagnostics.at:282: testing Tabulations and multibyte characters ... +151. diagnostics.at:303: testing Special files ... +127. output.at:277: ok +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none +145. diagnostics.at:84: ./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none + ok -150. diagnostics.at:282: testing Tabulations and multibyte characters ... -149. diagnostics.at:235: testing Tabulations and multibyte characters ... 152. diagnostics.at:328: testing Complaints from M4 ... -146. diagnostics.at:133: ok -151. diagnostics.at:303: testing Special files ... 
-./diagnostics.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:235: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y ./diagnostics.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +59. input.at:2719: ok +./diagnostics.at:303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +146. diagnostics.at:133: ok 153. diagnostics.at:351: testing Carriage return ... -./diagnostics.at:303: "$PERL" -pi -e ' + +./diagnostics.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:235: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5148,8 +5084,16 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:235: "$PERL" -pi -e ' + +./diagnostics.at:235: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +154. diagnostics.at:372: testing CR NL ... +./diagnostics.at:351: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77 +stderr: +input.y: warning: 4 shift/reduce conflicts [-Wconflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./output.at:641: grep -v // input.gv +./diagnostics.at:351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:282: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5157,7 +5101,10 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:282: "$PERL" -pi -e ' +156. diagnostics.at:432: testing Screen width: 80 columns ... +155. diagnostics.at:399: testing Screen width: 200 columns ... +./diagnostics.at:372: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? 
chr($v) : $v}ge' input.y experr || exit 77 +./diagnostics.at:351: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5165,9 +5112,10 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -stderr: -stdout: -./diagnostics.at:328: "$PERL" -pi -e ' +./diagnostics.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +143. output.at:641: ok +./diagnostics.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:303: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5175,14 +5123,15 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -136. output.at:341: ok -./diagnostics.at:235: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:351: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77 -./diagnostics.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -151. diagnostics.at:303: ok -./diagnostics.at:351: "$PERL" -pi -e ' +./diagnostics.at:351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +./diagnostics.at:399: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=200 bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=80 bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y + +153. diagnostics.at:351: ok +157. diagnostics.at:465: testing Screen width: 60 columns ... +149. diagnostics.at:235: ok +./diagnostics.at:328: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5190,30 +5139,10 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -150. 
diagnostics.at:282: ok - -./diagnostics.at:351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -153. diagnostics.at:351: ok -152. diagnostics.at:328: ok -154. diagnostics.at:372: testing CR NL ... -155. diagnostics.at:399: testing Screen width: 200 columns ... -149. diagnostics.at:235: - ok - - -./diagnostics.at:372: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77 -157. diagnostics.at:465: testing Screen width: 60 columns ... -./diagnostics.at:399: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=200 bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -156. diagnostics.at:432: testing Screen width: 80 columns ... -158. diagnostics.at:504: testing Suggestions ... -159. diagnostics.at:527: testing Counterexamples ... -./diagnostics.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=60 bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=80 bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:527: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +150. diagnostics.at:282: ok ./diagnostics.at:372: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) @@ -5222,8 +5151,9 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 +./diagnostics.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=60 bison -fcaret --color=debug -Wall,cex input.y ./diagnostics.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:504: "$PERL" -pi -e ' +./diagnostics.at:399: "$PERL" -pi -e ' s{()}{ $1 eq "" ? 
$1 : "" }ge; if (/Example/) { @@ -5231,7 +5161,8 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:527: "$PERL" -pi -e ' +158. diagnostics.at:504: testing Suggestions ... +./diagnostics.at:432: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5239,9 +5170,15 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:527: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:399: "$PERL" -pi -e ' +159. diagnostics.at:527: testing Counterexamples ... +./diagnostics.at:399: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=200 bison -fcaret -Wall,cex input.y +./diagnostics.at:432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=80 bison -fcaret -Wall,cex input.y +151. diagnostics.at:303: ok +./diagnostics.at:527: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y + + +./diagnostics.at:504: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5249,7 +5186,14 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:432: "$PERL" -pi -e ' +160. diagnostics.at:645: testing Deep Counterexamples ... +./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none +./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none +./diagnostics.at:645: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +158. diagnostics.at:504: ok +161. diagnostics.at:713: testing Indentation with message suppression ... +./diagnostics.at:465: "$PERL" -pi -e ' s{()}{ $1 eq "" ? 
$1 : "" }ge; if (/Example/) { @@ -5257,9 +5201,11 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -158. diagnostics.at:504: ./diagnostics.at:399: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=200 bison -fcaret -Wall,cex input.y - ok -./diagnostics.at:465: "$PERL" -pi -e ' +./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wno-other input.y +154. diagnostics.at:372: ok +152. diagnostics.at:328: ok +./diagnostics.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=60 bison -fcaret -Wall,cex input.y +155. diagnostics.at:399: ./diagnostics.at:645: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5267,24 +5213,17 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=60 bison -fcaret -Wall,cex input.y -./diagnostics.at:432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=80 bison -fcaret -Wall,cex input.y - -154. diagnostics.at:372: ok -157. diagnostics.at:465: ok -160. diagnostics.at:645: testing Deep Counterexamples ... -159. diagnostics.at:527: ok -./diagnostics.at:645: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y + ok 156. diagnostics.at:432: ok -161. diagnostics.at:713: testing Indentation with message suppression ... -./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wno-other input.y -155. diagnostics.at:399: ok +./diagnostics.at:645: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:645: "$PERL" -pi -e ' +163. skeletons.at:85: testing Installed skeleton file names ... +./skeletons.at:120: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --skeleton=yacc.c -o input-cmd-line.c input-cmd-line.y +./diagnostics.at:527: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5292,29 +5231,38 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -163. skeletons.at:85: testing Installed skeleton file names ... +164. skeletons.at:142: testing Boolean=variables: invalid skeleton defaults ... 
+./skeletons.at:155: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./diagnostics.at:527: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y 162. skeletons.at:25: testing Relative skeleton file names ... ./skeletons.at:27: mkdir tmp -./diagnostics.at:645: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./skeletons.at:120: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --skeleton=yacc.c -o input-cmd-line.c input-cmd-line.y -./skeletons.at:63: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret tmp/input-gram.y -160. diagnostics.at:645: ok +166. skeletons.at:248: testing Fatal errors make M4 exit immediately ... 165. skeletons.at:166: testing Complaining during macro argument expansion ... -164. skeletons.at:142: testing Boolean=variables: invalid skeleton defaults ... -./skeletons.at:155: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./skeletons.at:262: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input1.y ./skeletons.at:189: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input1.y -./skeletons.at:64: cat input-gram.tab.c +160. diagnostics.at:645: ok +./skeletons.at:63: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret tmp/input-gram.y ./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y -Werror -./skeletons.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input-gram.y -./skeletons.at:121: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input-cmd-line input-cmd-line.c $LIBS +157. diagnostics.at:465: ok +./skeletons.at:64: cat input-gram.tab.c 164. skeletons.at:142: ok -166. skeletons.at:248: testing Fatal errors make M4 exit immediately ... -./skeletons.at:262: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input1.y -./skeletons.at:69: cat input-gram.tab.c -stderr: -./skeletons.at:73: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --skeleton=tmp/skel.c tmp/input-cmd-line.y +167. skeletons.at:302: testing Fatal errors but M4 continues producing output ... 
+./skeletons.at:314: "$PERL" gen-skel.pl > skel.c || exit 77 + +./skeletons.at:279: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input2.y +./skeletons.at:322: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +168. sets.at:27: testing Nullable ... +./sets.at:42: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y +./skeletons.at:209: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input2.y +159. diagnostics.at:527: ok +./skeletons.at:121: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input-cmd-line input-cmd-line.c $LIBS +./skeletons.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input-gram.y + +stderr: +169. sets.at:111: testing Broken Closure ... +./sets.at:125: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y input.y:2.1-12: error: deprecated directive: '%pure-parser', use '%define api.pure' [-Werror=deprecated] 2 | %pure-parser | ^~~~~~~~~~~~ @@ -5323,26 +5271,12 @@ 3 | %error-verbose | ^~~~~~~~~~~~~~ | %define parse.error verbose -./skeletons.at:209: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input2.y -./skeletons.at:74: cat input-cmd-line.tab.c -./diagnostics.at:725: sed 's,.*/$,,' stderr 1>&2 -167. skeletons.at:302: testing Fatal errors but M4 continues producing output ... -162. skeletons.at:25: ok -./skeletons.at:314: "$PERL" gen-skel.pl > skel.c || exit 77 -./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y --warnings=error -./skeletons.at:279: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input2.y -./skeletons.at:322: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y - -./skeletons.at:223: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input3.y -./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y -Wnone,none -Werror --trace=none -168. sets.at:27: testing Nullable ... 
-./sets.at:42: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y -166. skeletons.at:248: ok 167. skeletons.at:302: ok +170. sets.at:153: testing Firsts ... +./sets.at:171: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y +166. skeletons.at:248: ok stderr: - -./skeletons.at:237: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input4.y bison (GNU Bison) 3.8.2 RITEM 0: e $end (rule 0) @@ -5414,139 +5348,14 @@ State 3: rule 0: -./sets.at:43: sed -f extract.sed stderr -./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y --warnings=none -Werror --trace=none -169. sets.at:111: testing Broken Closure ... -./sets.at:125: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y -170. sets.at:153: testing Firsts ... -./sets.at:171: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y -168. sets.at:27: ok -165. skeletons.at:166: ok +./sets.at:43: sed -f extract.sed stderr +./skeletons.at:223: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input3.y +172. sets.at:269: testing Build relations ... 171. sets.at:228: testing Accept ... -stderr: -stderr: -bison (GNU Bison) 3.8.2 -RITEM - 0: exp $end (rule 0) - 3: exp '<' exp (rule 1) - 7: exp '>' exp (rule 2) - 11: exp '+' exp (rule 3) - 15: exp '-' exp (rule 4) - 19: exp '^' exp (rule 5) - 23: exp '=' exp (rule 6) - 27: "exp" (rule 7) - - -DERIVES - $accept derives - 0 exp $end - exp derives - 1 exp '<' exp - 2 exp '>' exp - 3 exp '+' exp - 4 exp '-' exp - 5 exp '^' exp - 6 exp '=' exp - 7 "exp" - - -NULLABLE - $accept: no - exp: no - - -RTC: Firsts Input BEGIN - - 01 - .--. - 0| 1| - 1| 1| - `--' -RTC: Firsts Input END - -RTC: Firsts Output BEGIN - - 01 - .--. 
- 0|11| - 1| 1| - `--' -RTC: Firsts Output END - -FIRSTS - $accept firsts - $accept - exp - exp firsts - exp - - -FDERIVES - $accept derives - 0 exp $end - 1 exp '<' exp - 2 exp '>' exp - 3 exp '+' exp - 4 exp '-' exp - 5 exp '^' exp - 6 exp '=' exp - 7 "exp" - exp derives - 1 exp '<' exp - 2 exp '>' exp - 3 exp '+' exp - 4 exp '-' exp - 5 exp '^' exp - 6 exp '=' exp - 7 "exp" - - -relation_transpose: - 0: 1 2 3 4 5 6 - 1: 1 2 3 4 5 6 - 2: 1 2 3 4 5 6 - 3: 1 2 3 4 5 6 - 4: 1 2 3 4 5 6 - 5: 1 2 3 4 5 6 - 6: 1 2 3 4 5 6 - -relation_transpose: output: - 1: 0 1 2 3 4 5 6 - 2: 0 1 2 3 4 5 6 - 3: 0 1 2 3 4 5 6 - 4: 0 1 2 3 4 5 6 - 5: 0 1 2 3 4 5 6 - 6: 0 1 2 3 4 5 6 - -follows after includes: - FOLLOWS[goto[0] = (0, exp, 2)] = $end '<' '>' '+' '-' '^' '=' - FOLLOWS[goto[1] = (4, exp, 10)] = $end '<' '>' '+' '-' '^' '=' - FOLLOWS[goto[2] = (5, exp, 11)] = $end '<' '>' '+' '-' '^' '=' - FOLLOWS[goto[3] = (6, exp, 12)] = $end '<' '>' '+' '-' '^' '=' - FOLLOWS[goto[4] = (7, exp, 13)] = $end '<' '>' '+' '-' '^' '=' - FOLLOWS[goto[5] = (8, exp, 14)] = $end '<' '>' '+' '-' '^' '=' - FOLLOWS[goto[6] = (9, exp, 15)] = $end '<' '>' '+' '-' '^' '=' - -Lookaheads: - State 1: - rule 7: - State 3: - rule 0: - State 10: - rule 1: $end '<' '>' '+' '-' '^' '=' - State 11: - rule 2: $end '<' '>' '+' '-' '^' '=' - State 12: - rule 3: $end '<' '>' '+' '-' '^' '=' - State 13: - rule 4: $end '<' '>' '+' '-' '^' '=' - State 14: - rule 5: $end '<' '>' '+' '-' '^' '=' - State 15: - rule 6: $end '<' '>' '+' '-' '^' '=' - ./sets.at:240: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y +./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +stderr: bison (GNU Bison) 3.8.2 RITEM 0: a $end (rule 0) @@ -5787,26 +5596,139 @@ State 10: rule 0: - ./sets.at:127: sed -n 's/[ ]*$//;/^RTC: Firsts Output BEGIN/,/^RTC: Firsts Output END/p' stderr -./sets.at:172: sed -f extract.sed stderr -161. diagnostics.at:713: ok -172. sets.at:269: testing Build relations ... -170. sets.at:153: ok -169. sets.at:111: ok -./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -stderr: +./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror ./sets.at:243: sed -n 's/.*define YYFINAL *\([0-9][0-9]*\)/final state \1/p' input.c -stdout: - -stdout: -./skeletons.at:122: $PREPARSER ./input-cmd-line -final state 6 stderr: +bison (GNU Bison) 3.8.2 +RITEM + 0: exp $end (rule 0) + 3: exp '<' exp (rule 1) + 7: exp '>' exp (rule 2) + 11: exp '+' exp (rule 3) + 15: exp '-' exp (rule 4) + 19: exp '^' exp (rule 5) + 23: exp '=' exp (rule 6) + 27: "exp" (rule 7) -syntax error, unexpected 'a', expecting end of file -./skeletons.at:122: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +DERIVES + $accept derives + 0 exp $end + exp derives + 1 exp '<' exp + 2 exp '>' exp + 3 exp '+' exp + 4 exp '-' exp + 5 exp '^' exp + 6 exp '=' exp + 7 "exp" + + +NULLABLE + $accept: no + exp: no + + +RTC: Firsts Input BEGIN + + 01 + .--. + 0| 1| + 1| 1| + `--' +RTC: Firsts Input END + +RTC: Firsts Output BEGIN + + 01 + .--. 
+ 0|11| + 1| 1| + `--' +RTC: Firsts Output END + +FIRSTS + $accept firsts + $accept + exp + exp firsts + exp + + +FDERIVES + $accept derives + 0 exp $end + 1 exp '<' exp + 2 exp '>' exp + 3 exp '+' exp + 4 exp '-' exp + 5 exp '^' exp + 6 exp '=' exp + 7 "exp" + exp derives + 1 exp '<' exp + 2 exp '>' exp + 3 exp '+' exp + 4 exp '-' exp + 5 exp '^' exp + 6 exp '=' exp + 7 "exp" + + +relation_transpose: + 0: 1 2 3 4 5 6 + 1: 1 2 3 4 5 6 + 2: 1 2 3 4 5 6 + 3: 1 2 3 4 5 6 + 4: 1 2 3 4 5 6 + 5: 1 2 3 4 5 6 + 6: 1 2 3 4 5 6 + +relation_transpose: output: + 1: 0 1 2 3 4 5 6 + 2: 0 1 2 3 4 5 6 + 3: 0 1 2 3 4 5 6 + 4: 0 1 2 3 4 5 6 + 5: 0 1 2 3 4 5 6 + 6: 0 1 2 3 4 5 6 + +follows after includes: + FOLLOWS[goto[0] = (0, exp, 2)] = $end '<' '>' '+' '-' '^' '=' + FOLLOWS[goto[1] = (4, exp, 10)] = $end '<' '>' '+' '-' '^' '=' + FOLLOWS[goto[2] = (5, exp, 11)] = $end '<' '>' '+' '-' '^' '=' + FOLLOWS[goto[3] = (6, exp, 12)] = $end '<' '>' '+' '-' '^' '=' + FOLLOWS[goto[4] = (7, exp, 13)] = $end '<' '>' '+' '-' '^' '=' + FOLLOWS[goto[5] = (8, exp, 14)] = $end '<' '>' '+' '-' '^' '=' + FOLLOWS[goto[6] = (9, exp, 15)] = $end '<' '>' '+' '-' '^' '=' + +Lookaheads: + State 1: + rule 7: + State 3: + rule 0: + State 10: + rule 1: $end '<' '>' '+' '-' '^' '=' + State 11: + rule 2: $end '<' '>' '+' '-' '^' '=' + State 12: + rule 3: $end '<' '>' '+' '-' '^' '=' + State 13: + rule 4: $end '<' '>' '+' '-' '^' '=' + State 14: + rule 5: $end '<' '>' '+' '-' '^' '=' + State 15: + rule 6: $end '<' '>' '+' '-' '^' '=' + +./sets.at:172: sed -f extract.sed stderr +./diagnostics.at:725: sed 's,.*/$,,' stderr 1>&2 +./skeletons.at:69: cat input-gram.tab.c +stdout: +./skeletons.at:73: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --skeleton=tmp/skel.c tmp/input-cmd-line.y +168. sets.at:27: final state 6 + ok +170. sets.at:153: ok +./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y --warnings=error ./sets.at:248: sed -n ' /^State \(.*\)/{ s//final state \1/ @@ -5818,20 +5740,42 @@ q } ' input.output +15. input.at:774: ok +169. sets.at:111: ok +16. input.at:784: ok 171. sets.at:228: ok + + + +./skeletons.at:237: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input4.y + +stderr: +stderr: +stdout: +stdout: +./skeletons.at:74: cat input-cmd-line.tab.c +stderr: 173. sets.at:315: testing Reduced Grammar ... 
./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -./skeletons.at:126: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input-gram.c input-gram.y -./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./output.at:339: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@{.cc" --header="@{.hh" cxx.y +stdout: +./output.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "[.cc" --header="[.hh" cxx.y +./output.at:338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@@.cc" --header="@@.hh" cxx.y +stderr: 174. sets.at:394: testing Reduced Grammar with prec and assoc ... +stderr: +./sets.at:412: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=grammar -o input.c input.y +stdout: +stdout: 175. reduce.at:26: testing Useless Terminals ... +176. reduce.at:70: testing Useless Nonterminals ... ./reduce.at:47: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./sets.at:412: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -176. reduce.at:70: testing Useless Nonterminals ... +./output.at:336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o ").cc" --header=").hh" cxx.y ./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./skeletons.at:127: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input-gram input-gram.c $LIBS +./output.at:335: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "(.cc" --header="(.hh" cxx.y stderr: + input.y: error: 5 reduce/reduce conflicts [-Werror=conflicts-rr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples input.y:2.14-17: error: rule useless in parser due to conflicts [-Werror=other] @@ -5849,20 +5793,82 @@ input.y:2.42-45: error: rule useless in parser due to conflicts [-Werror=other] 2 | expr: term | term | term | term | term | term | ^~~~ -./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -Werror +165. skeletons.at:166: ok +stderr: +stdout: +./output.at:340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@}.cc" --header="@}.hh" cxx.y +177. reduce.at:120: testing Useless Rules ... +./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +stderr: +stdout: +178. reduce.at:224: testing Useless Parts ... 
+./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y + +./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "].cc" --header="].hh" cxx.y ./sets.at:286: sed 's,.*/$,,' stderr 1>&2 -./reduce.at:49: sed -n '/^Grammar/q;/^$/!p' input.output +./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y -Wnone,none -Werror --trace=none +162. skeletons.at:25: ok +179. reduce.at:312: testing Reduced Automaton ... +./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret not-reduced.y +./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -Werror +./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror + +stderr: +174. sets.at:394: ok +stdout: +./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y --warnings=none -Werror --trace=none +./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y -Werror +./output.at:337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "#.cc" --header="#.hh" cxx.y stderr: +./reduce.at:49: sed -n '/^Grammar/q;/^$/!p' input.output stdout: +./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh" cxx.y + +180. reduce.at:406: testing Underivable Rules ... +./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +./output.at:341: ls "[.cc" "[.hh" +./output.at:336: ls ").cc" ").hh" +./output.at:338: ls "@@.cc" "@@.hh" +./output.at:335: ls "(.cc" "(.hh" +./output.at:339: ls "@{.cc" "@{.hh" +./output.at:342: ls "].cc" "].hh" +./output.at:340: ls "@}.cc" "@}.hh" +./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +181. reduce.at:452: testing Bad start symbols ... 
+./reduce.at:467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y -Werror +./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./reduce.at:473: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y ./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +stdout: +stdout: +stdout: +stdout: +stdout: +stdout: +stdout: +@@.cc +@@.hh +(.cc +(.hh +].cc +].hh +@}.cc +@}.hh +@{.cc +@{.hh +).cc +).hh +[.cc +[.hh 175. reduce.at:26: ok -174. sets.at:394: ok -137. output.at:342: ok -./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror - +182. reduce.at:550: testing no lr.type: Single State Split ... +./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./output.at:337: ls "#.cc" "#.hh" +./output.at:328: ls "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh" +./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none stderr: - bison (GNU Bison) 3.8.2 input.y: error: 1 nonterminal useless in grammar [-Werror=other] input.y: error: 1 rule useless in grammar [-Werror=other] @@ -5920,28 +5926,64 @@ reduced input.y defines 7 terminals, 4 nonterminals, and 6 productions. +./reduce.at:550: sed -n '/^State 0$/,$p' input.output +./output.at:338: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@@.cc" +./output.at:335: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "(.cc" +./output.at:340: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@}.cc" +./sets.at:325: sed 's,.*/$,,' stderr 1>&2 +./output.at:336: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c ").cc" +./reduce.at:480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./output.at:339: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@{.cc" +./output.at:342: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "].cc" +./output.at:341: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "[.cc" +stdout: +stdout: +`~!@#$%^&*()-=_+{}[]|\:;<>, .'.cc +`~!@#$%^&*()-=_+{}[]|\:;<>, .'.hh +#.cc +#.hh +161. diagnostics.at:713: ok + +183. reduce.at:550: testing lr.type=lalr: Single State Split ... 
+./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y stderr: -177. reduce.at:120: testing Useless Rules ... -./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y input.y: error: 3 nonterminals useless in grammar [-Werror=other] input.y: error: 3 rules useless in grammar [-Werror=other] input.y:11.1-8: error: nonterminal useless in grammar: useless1 [-Werror=other] input.y:12.1-8: error: nonterminal useless in grammar: useless2 [-Werror=other] input.y:13.1-8: error: nonterminal useless in grammar: useless3 [-Werror=other] -179. reduce.at:312: testing Reduced Automaton ... -178. reduce.at:224: testing Useless Parts ... -./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret not-reduced.y -./reduce.at:89: sed 's,.*/$,,' stderr 1>&2 -./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y -./sets.at:325: sed 's,.*/$,,' stderr 1>&2 -./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y -Werror -./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=error -./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y -Werror +stderr: +stderr: +not-reduced.y: error: 2 nonterminals useless in grammar [-Werror=other] +not-reduced.y: error: 3 rules useless in grammar [-Werror=other] +not-reduced.y:14.1-13: error: nonterminal useless in grammar: not_reachable [-Werror=other] + 14 | not_reachable: useful { /* A not reachable action. */ } + | ^~~~~~~~~~~~~ +not-reduced.y:17.1-14: error: nonterminal useless in grammar: non_productive [-Werror=other] + 17 | non_productive: non_productive useless_token + | ^~~~~~~~~~~~~~ +not-reduced.y:11.6-57: error: rule useless in grammar [-Werror=other] + 11 | | non_productive { /* A non productive action. 
*/ } + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y: error: 1 nonterminal useless in grammar [-Werror=other] +input.y: error: 1 rule useless in grammar [-Werror=other] +input.y:18.1-6: error: nonterminal useless in grammar: unused [-Werror=other] + 18 | unused + | ^~~~~~ ./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none stderr: +input.y: error: 2 nonterminals useless in grammar [-Werror=other] +input.y: error: 3 rules useless in grammar [-Werror=other] +input.y:6.1-11: error: nonterminal useless in grammar: underivable [-Werror=other] + 6 | underivable: indirection; + | ^~~~~~~~~~~ +input.y:7.1-11: error: nonterminal useless in grammar: indirection [-Werror=other] + 7 | indirection: underivable; + | ^~~~~~~~~~~ +input.y:5.15-25: error: rule useless in grammar [-Werror=other] + 5 | exp: useful | underivable; + | ^~~~~~~~~~~ +stderr: input.y: error: 9 nonterminals useless in grammar [-Werror=other] input.y: error: 9 rules useless in grammar [-Werror=other] input.y:10.1-8: error: nonterminal useless in grammar: useless1 [-Werror=other] @@ -5971,392 +6013,364 @@ input.y:18.1-8: error: nonterminal useless in grammar: useless9 [-Werror=other] 18 | useless9: '9'; | ^~~~~~~~ -./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -stderr: -not-reduced.y: error: 2 nonterminals useless in grammar [-Werror=other] -not-reduced.y: error: 3 rules useless in grammar [-Werror=other] -not-reduced.y:14.1-13: error: nonterminal useless in grammar: not_reachable [-Werror=other] - 14 | not_reachable: useful { /* A not reachable action. */ } - | ^~~~~~~~~~~~~ -not-reduced.y:17.1-14: error: nonterminal useless in grammar: non_productive [-Werror=other] - 17 | non_productive: non_productive useless_token - | ^~~~~~~~~~~~~~ -not-reduced.y:11.6-57: error: rule useless in grammar [-Werror=other] - 11 | | non_productive { /* A non productive action. 
*/ } - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +./reduce.at:550: sed -n '/^State 0$/,$p' input.output +./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=error +./reduce.at:420: sed 's,.*/$,,' stderr 1>&2 ./reduce.at:146: sed 's,.*/$,,' stderr 1>&2 -stderr: -stdout: +./reduce.at:261: sed 's,.*/$,,' stderr 1>&2 +./output.at:328: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" +./reduce.at:89: sed 's,.*/$,,' stderr 1>&2 +./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./output.at:337: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "#.cc" ./reduce.at:341: sed 's,.*/$,,' stderr 1>&2 -stderr: -./output.at:782: sed -ne 's/#line [0-9][0-9]* "/#line "/p;/INCLUDED/p;/\\file/{p;n;p;}' out/include/ast/loc.hh -input.y: error: 1 nonterminal useless in grammar [-Werror=other] -input.y: error: 1 rule useless in grammar [-Werror=other] -input.y:18.1-6: error: nonterminal useless in grammar: unused [-Werror=other] - 18 | unused - | ^~~~~~ +./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -172. 
sets.at:269: ./output.at:794: sed -ne 's/^#line [0-9][0-9]* "/#line "/p;/INCLUDED/p;/\\file/{p;n;p;}' out/x1.hh -./reduce.at:261: sed 's,.*/$,,' stderr 1>&2 - ok -stderr: -./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y --warnings=error -stdout: -./skeletons.at:128: $PREPARSER ./input-gram -stderr: -./output.at:806: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o out/x2.cc -M out/=bar/ x2.yy - -./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -Wnone,none -Werror --trace=none ./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y --warnings=error -syntax error, unexpected 'a', expecting end of file -./skeletons.at:128: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -180. reduce.at:406: testing Underivable Rules ... -./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -163. skeletons.at:85: ./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none - ok -./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y -Wnone,none -Werror --trace=none -./output.at:806: $CXX $CPPFLAGS $CXXFLAGS -Iout/include -c -o out/x2.o out/x2.cc - -./reduce.at:97: sed -n '/^Grammar/q;/^$/!p' input.output -./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y -Wnone,none -Werror --trace=none -./reduce.at:109: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=none -Werror --trace=none -./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y --warnings=none -Werror --trace=none -181. reduce.at:452: testing Bad start symbols ... 
-./reduce.at:467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./reduce.at:473: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y --warnings=none -Werror --trace=none -./reduce.at:480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -stderr: +./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y --warnings=error +./reduce.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./reduce.at:505: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +172. sets.at:269: ok stderr: -input.y: error: 2 nonterminals useless in grammar [-Werror=other] -input.y: error: 3 rules useless in grammar [-Werror=other] -input.y:6.1-11: error: nonterminal useless in grammar: underivable [-Werror=other] - 6 | underivable: indirection; - | ^~~~~~~~~~~ -input.y:7.1-11: error: nonterminal useless in grammar: indirection [-Werror=other] - 7 | indirection: underivable; - | ^~~~~~~~~~~ -input.y:5.15-25: error: rule useless in grammar [-Werror=other] - 5 | exp: useful | underivable; - | ^~~~~~~~~~~ stdout: -./reduce.at:355: sed -n '/^Grammar/q;/^$/!p' not-reduced.output -./reduce.at:179: sed -n '/^Grammar/q;/^$/!p' input.output -173. sets.at:315: ok -176. 
reduce.at:70: ok -./reduce.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./reduce.at:420: sed 's,.*/$,,' stderr 1>&2 -./reduce.at:392: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret reduced.y -./reduce.at:213: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./reduce.at:270: sed -n '/^State 0/q;/^$/!p' input.output +./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -Wnone,none -Werror --trace=none +./skeletons.at:122: $PREPARSER ./input-cmd-line +181. reduce.at:452: ok -./reduce.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./reduce.at:298: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -182. reduce.at:550: testing no lr.type: Single State Split ... -./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:505: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -183. reduce.at:550: testing lr.type=lalr: Single State Split ... +184. reduce.at:550: testing lr.type=ielr: Single State Split ... +./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none ./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y -Wnone,none -Werror --trace=none ./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -181. reduce.at:452: ok -./reduce.at:396: sed 's/not-reduced/reduced/g' not-reduced.c -179. reduce.at:312: ok +185. reduce.at:550: testing lr.type=canonical-lr: Single State Split ... 
+./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y -Wnone,none -Werror --trace=none stderr: -stdout: - +syntax error, unexpected 'a', expecting end of file +./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./skeletons.at:122: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./skeletons.at:126: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input-gram.c input-gram.y +./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=none -Werror --trace=none +./reduce.at:550: sed -n '/^State 0$/,$p' input.output ./reduce.at:550: sed -n '/^State 0$/,$p' input.output +./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y --warnings=none -Werror --trace=none +./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y --warnings=none -Werror --trace=none +./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none ./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -177. reduce.at:120: ok ./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:550: sed -n '/^State 0$/,$p' input.output - -184. reduce.at:550: testing lr.type=ielr: Single State Split ... -./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./skeletons.at:127: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input-gram input-gram.c $LIBS ./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - -185. reduce.at:550: testing lr.type=canonical-lr: Single State Split ... 
-./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y ./reduce.at:434: sed -n '/^Grammar/q;/^$/!p' input.output +./reduce.at:97: sed -n '/^Grammar/q;/^$/!p' input.output +./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./reduce.at:270: sed -n '/^State 0/q;/^$/!p' input.output +./reduce.at:355: sed -n '/^Grammar/q;/^$/!p' not-reduced.output +173. sets.at:315: ok 180. reduce.at:406: ok -./reduce.at:550: sed -n '/^State 0$/,$p' input.output -186. reduce.at:783: testing no lr.type: Lane Split ... -./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -stderr: -stdout: -./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:550: sed -n '/^State 0$/,$p' input.output -178. reduce.at:224: ok -./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:783: sed -n '/^State 0$/,$p' input.output -187. reduce.at:783: testing lr.type=lalr: Lane Split ... -./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -188. reduce.at:783: testing lr.type=ielr: Lane Split ... -./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:783: sed -n '/^State 0$/,$p' input.output -./reduce.at:783: sed -n '/^State 0$/,$p' input.output -./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -stdout: -./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -./reduce.at:550: $PREPARSER ./input -stderr: -stdout: stderr: stdout: ./reduce.at:550: $PREPARSER ./input -./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +186. reduce.at:783: testing no lr.type: Lane Split ... +./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +187. reduce.at:783: testing lr.type=lalr: Lane Split ... +./reduce.at:109: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./reduce.at:298: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:392: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret reduced.y stderr: syntax error ./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./reduce.at:550: $PREPARSER ./input stderr: stdout: +./reduce.at:550: $PREPARSER ./input stderr: syntax error ./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./reduce.at:550: $PREPARSER ./input +./reduce.at:783: sed -n '/^State 0$/,$p' input.output 183. reduce.at:550: ok -185. 
reduce.at:550: stderr: -./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ok +./reduce.at:179: sed -n '/^Grammar/q;/^$/!p' input.output +./reduce.at:783: sed -n '/^State 0$/,$p' input.output 182. reduce.at:550: ok -184. reduce.at:550: ok - +188. reduce.at:783: testing lr.type=ielr: Lane Split ... +./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y 189. reduce.at:783: testing lr.type=canonical-lr: Lane Split ... ./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:783: sed -n '/^State 0$/,$p' input.output +./reduce.at:213: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:396: sed 's/not-reduced/reduced/g' not-reduced.c +./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +179. reduce.at:312: ok +stderr: +stdout: +stderr: +stdout: +./reduce.at:783: sed -n '/^State 0$/,$p' input.output +178. reduce.at:224: ok + +176. reduce.at:70: ok +./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS 190. reduce.at:1027: testing no lr.type: Complex Lane Split ... -191. reduce.at:1027: testing lr.type=lalr: Complex Lane Split ... -./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -192. reduce.at:1027: testing lr.type=ielr: Complex Lane Split ... ./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y stderr: stdout: -./reduce.at:783: $PREPARSER ./input stderr: -syntax error -./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -186. reduce.at:783: ok -./reduce.at:1027: sed -n '/^State 0$/,$p' input.output -./reduce.at:783: sed -n '/^State 0$/,$p' input.output -./reduce.at:1027: sed -n '/^State 0$/,$p' input.output +./reduce.at:550: $PREPARSER ./input +stderr: +./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: + +191. reduce.at:1027: testing lr.type=lalr: Complex Lane Split ... +./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:550: $PREPARSER ./input +184. reduce.at:550: ok +stderr: +./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +192. reduce.at:1027: testing lr.type=ielr: Complex Lane Split ... +./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y + +185. 
reduce.at:550: ok -./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:1027: sed -n '/^State 0$/,$p' input.output -./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS 193. reduce.at:1027: testing lr.type=canonical-lr: Complex Lane Split ... ./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y ./reduce.at:1027: sed -n '/^State 0$/,$p' input.output +./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:1027: sed -n '/^State 0$/,$p' input.output +194. reduce.at:1296: testing no lr.type: Split During Added Lookahead Propagation ... +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: +./skeletons.at:128: $PREPARSER ./input-gram +./reduce.at:1027: sed -n '/^State 0$/,$p' input.output +stderr: stderr: +syntax error, unexpected 'a', expecting end of file stdout: -./reduce.at:783: $PREPARSER ./input +./reduce.at:1027: sed -n '/^State 0$/,$p' input.output +./skeletons.at:128: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +177. reduce.at:120: ok ./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:783: $PREPARSER ./input +163. skeletons.at:85: ok +stderr: +./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stdout: + +./output.at:782: sed -ne 's/#line [0-9][0-9]* "/#line "/p;/INCLUDED/p;/\\file/{p;n;p;}' out/include/ast/loc.hh + +./output.at:794: sed -ne 's/^#line [0-9][0-9]* "/#line "/p;/INCLUDED/p;/\\file/{p;n;p;}' out/x1.hh +./output.at:806: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o out/x2.cc -M out/=bar/ x2.yy +195. reduce.at:1296: testing lr.type=lalr: Split During Added Lookahead Propagation ... +196. reduce.at:1296: testing lr.type=ielr: Split During Added Lookahead Propagation ... +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror stderr: +stdout: +./output.at:806: $CXX $CPPFLAGS $CXXFLAGS -Iout/include -c -o out/x2.o out/x2.cc +./reduce.at:783: $PREPARSER ./input stderr: syntax error -stderr: ./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +186. reduce.at:783: ok +stderr: stdout: -./reduce.at:1027: $PREPARSER ./input +./reduce.at:783: $PREPARSER ./input stderr: ./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -188. reduce.at:783: ok -190. 
reduce.at:1027: ok -187. reduce.at:783: ok +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +./reduce.at:1296: sed -n '/^State 0$/,$p' input.output stderr: -stdout: -./reduce.at:1027: $PREPARSER ./input stderr: +stdout: +stdout: - -syntax error -./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:783: $PREPARSER ./input +189. reduce.at:783: ok +./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: - -stdout: ./reduce.at:783: $PREPARSER ./input +syntax error +./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -191. reduce.at:1027: ok +stderr: +input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples ./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -195. reduce.at:1296: testing lr.type=lalr: Split During Added Lookahead Propagation ... -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -194. reduce.at:1296: testing no lr.type: Split During Added Lookahead Propagation ... +187. reduce.at:783: ok +188. reduce.at:783: ok +197. reduce.at:1296: testing lr.type=canonical-lr: Split During Added Lookahead Propagation ... ./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -189. reduce.at:783: ok -196. reduce.at:1296: testing lr.type=ielr: Split During Added Lookahead Propagation ... +./reduce.at:1296: sed 's,.*/$,,' stderr 1>&2 -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -197. reduce.at:1296: testing lr.type=canonical-lr: Split During Added Lookahead Propagation ... -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error + 198. reduce.at:1627: testing no lr.default-reduction ... 
./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:1296: sed -n '/^State 0$/,$p' input.output +199. reduce.at:1627: testing lr.default-reduction=most ... +./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +200. reduce.at:1627: testing lr.default-reduction=consistent ... +./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y stderr: input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples stderr: -./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stdout: ./reduce.at:1027: $PREPARSER ./input -./reduce.at:1296: sed 's,.*/$,,' stderr 1>&2 stderr: -stderr: -input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +syntax error ./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:1296: sed 's,.*/$,,' stderr 1>&2 +190. reduce.at:1027: ok ./reduce.at:1296: sed -n '/^State 0$/,$p' input.output ./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -./reduce.at:1296: sed 's,.*/$,,' stderr 1>&2 -192. reduce.at:1027: ok -./reduce.at:1627: sed -n '/^State 0$/,$p' input.output ./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -stderr: -stdout: -199. reduce.at:1627: testing lr.default-reduction=most ... -./reduce.at:1027: $PREPARSER ./input -./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -stderr: -./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:1627: sed -n '/^State 0$/,$p' input.output +./reduce.at:1627: sed -n '/^State 0$/,$p' input.output ./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -193. 
reduce.at:1027: ok +./reduce.at:1627: sed -n '/^State 0$/,$p' input.output -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none stderr: -./reduce.at:1627: sed -n '/^State 0$/,$p' input.output +./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stdout: -200. reduce.at:1627: testing lr.default-reduction=consistent ... -./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:1296: $PREPARSER ./input stderr: ./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:1027: $PREPARSER ./input +./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: +syntax error stdout: -196. reduce.at:1296: ok -./reduce.at:1296: $PREPARSER ./input -./reduce.at:1296: sed -n '/^State 0$/,$p' input.output +./reduce.at:1027: $PREPARSER ./input stderr: -./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./reduce.at:1296: sed -n '/^State 0$/,$p' input.output -197. reduce.at:1296: ok -./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - -./reduce.at:1627: sed -n '/^State 0$/,$p' input.output -./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +191. reduce.at:1027: ok +193. reduce.at:1027: ok +201. reduce.at:1627: testing lr.default-reduction=accepting ... +./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y stderr: -stdout: -./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:1627: $PREPARSER ./input + +stdout: +./reduce.at:1027: $PREPARSER ./input stderr: -201. reduce.at:1627: testing lr.default-reduction=accepting ... -./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -198. reduce.at:1627: ok +./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +203. report.at:3123: testing Reports with conflicts ... 202. report.at:37: testing Reports ... -202. report.at:37: skipped (report.at:75) +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +202. report.at:37: 192. 
reduce.at:1027: skipped (report.at:75) + ok +203. report.at:3123: skipped (report.at:3132) + +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none ./reduce.at:1627: sed -n '/^State 0$/,$p' input.output -./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS 204. conflicts.at:28: testing Token declaration order ... ./conflicts.at:81: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -203. report.at:3123: testing Reports with conflicts ... -203. report.at:3123: stderr: -stdout: -./reduce.at:1627: $PREPARSER ./input - skipped (report.at:3132) -stderr: -./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +206. conflicts.at:183: testing Useless associativity warning ... +205. conflicts.at:101: testing Token declaration order: literals vs. identifiers ... +./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wprecedence input.y +./conflicts.at:130: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all -o input.c input.y +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./reduce.at:1296: sed -n '/^State 0$/,$p' input.output +./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y -Werror stderr: stdout: ./reduce.at:1296: $PREPARSER ./input - stderr: -syntax error ./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -199. reduce.at:1627: ok -194. reduce.at:1296: ok -stdout: -205. conflicts.at:101: testing Token declaration order: literals vs. identifiers ... +196. reduce.at:1296: ./conflicts.at:131: cat input.output | sed -n '/^State 0$/,/^State 1$/p' + ok stderr: -./reduce.at:1627: $PREPARSER ./input -./conflicts.at:130: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all -o input.c input.y stdout: +./reduce.at:1627: $PREPARSER ./input stderr: -./reduce.at:1296: $PREPARSER ./input +205. conflicts.at:101: ok ./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:82: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:1296: sed -n '/^State 0$/,$p' input.output +198. reduce.at:1627: ok -200. reduce.at:1627: ok -./conflicts.at:82: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -195. 
reduce.at:1296: ok +stdout: +./reduce.at:1296: $PREPARSER ./input +208. conflicts.at:275: testing S/R in initial ... +stderr: 207. conflicts.at:218: testing Useless precedence warning ... ./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y +./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input.y:2.1-9: error: useless precedence and associativity for "=" [-Werror=precedence] +input.y:4.1-5: error: useless associativity for "*", use %precedence [-Werror=precedence] +input.y:5.1-11: error: useless precedence for "(" [-Werror=precedence] +stderr: +197. reduce.at:1296: ok stdout: -206. conflicts.at:183: testing Useless associativity warning ... -./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wprecedence input.y -./output.at:835: $CXX $CPPFLAGS $CXXFLAGS -Iout/ $LDFLAGS -o parser out/x[12].o main.cc $LIBS - +./reduce.at:1627: $PREPARSER ./input stderr: -208. conflicts.at:275: testing S/R in initial ... ./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:131: cat input.output | sed -n '/^State 0$/,/^State 1$/p' -205. conflicts.at:101: ok -209. conflicts.at:301: testing %nonassoc and eof ... -./conflicts.at:368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror stderr: -./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y -Werror stdout: +./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./reduce.at:1627: $PREPARSER ./input - -./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y -Werror stderr: +200. reduce.at:1627: ok + +209. conflicts.at:301: testing %nonassoc and eof ... ./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:205: sed 's,.*/$,,' stderr 1>&2 +199. reduce.at:1627: ok + +./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y --warnings=error 210. conflicts.at:509: testing parse.error=verbose and consistent errors: lr.type=ielr ... +./conflicts.at:509: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +211. conflicts.at:513: testing parse.error=verbose and consistent errors: lr.type=ielr %glr-parser ... 
+./conflicts.at:513: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + +./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y -Werror +./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +./conflicts.at:368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +212. conflicts.at:518: testing parse.error=verbose and consistent errors: lr.type=ielr c++ ... +./conflicts.at:518: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./conflicts.at:509: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./conflicts.at:84: $PREPARSER ./input -201. reduce.at:1627: ok stderr: ./conflicts.at:84: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./conflicts.at:509: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stdout: +./reduce.at:1627: $PREPARSER ./input +204. conflicts.at:28: ok +./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y -Wnone,none -Werror --trace=none +stderr: +stderr: +./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input.y:4.10-15: error: rule useless in parser due to conflicts [-Werror=other] + +201. reduce.at:1627: ok +./conflicts.at:513: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:284: sed 's,.*/$,,' stderr 1>&2 + +./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error stderr: input.y:7.1-9: error: useless precedence and associativity for U [-Werror=precedence] 7 | %nonassoc U @@ -6370,38 +6384,112 @@ input.y:2.1-11: error: useless precedence for Z [-Werror=precedence] 2 | %precedence Z | ^~~~~~~~~~~ -input.y:4.10-15: error: rule useless in parser due to conflicts [-Werror=other] -204. conflicts.at:28: ok -./conflicts.at:368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - -./conflicts.at:284: sed 's,.*/$,,' stderr 1>&2 +stderr: +stdout: +134. output.at:339: ok +./conflicts.at:518: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./conflicts.at:248: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error - +stderr: ./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y --warnings=error +213. 
conflicts.at:523: testing parse.error=verbose and consistent errors: lr.type=ielr java ... +./conflicts.at:523: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.java input.y + +stdout: +./reduce.at:1296: $PREPARSER ./input stderr: -input.y:2.1-9: error: useless precedence and associativity for "=" [-Werror=precedence] -input.y:4.1-5: error: useless associativity for "*", use %precedence [-Werror=precedence] -input.y:5.1-11: error: useless precedence for "(" [-Werror=precedence] -./conflicts.at:205: sed 's,.*/$,,' stderr 1>&2 -211. conflicts.at:513: testing parse.error=verbose and consistent errors: lr.type=ielr %glr-parser ... -./conflicts.at:513: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:509: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -212. conflicts.at:518: testing parse.error=verbose and consistent errors: lr.type=ielr c++ ... -./conflicts.at:518: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y --warnings=error +214. conflicts.at:530: testing parse.error=verbose and consistent errors: lr.type=ielr lr.default-reduction=consistent ... +syntax error +./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:530: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +194. reduce.at:1296: ok +./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y --warnings=none -Werror --trace=none +215. conflicts.at:535: testing parse.error=verbose and consistent errors: lr.type=ielr lr.default-reduction=accepting ... +./conflicts.at:535: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + +stderr: +stdout: +130. output.at:335: ok +stderr: +stdout: ./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -./conflicts.at:513: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +131. output.at:336: 213. conflicts.at:523: ok +216. conflicts.at:540: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ... +./conflicts.at:540: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + skipped (conflicts.at:523) +stderr: +stdout: + +./conflicts.at:530: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +132. output.at:337: ok +stderr: + + +stdout: + +./reduce.at:1296: $PREPARSER ./input +stderr: +syntax error +./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +217. conflicts.at:546: testing parse.error=verbose and consistent errors: lr.type=canonical-lr parse.lac=full ... 
+./conflicts.at:546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +219. conflicts.at:558: testing parse.error=verbose and consistent errors: c++ lr.type=canonical-lr parse.lac=full ... +./conflicts.at:558: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y ./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y -Wnone,none -Werror --trace=none -./conflicts.at:518: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +195. reduce.at:1296: ok +220. conflicts.at:564: testing parse.error=verbose and consistent errors: c++ lr.type=ielr parse.lac=full ... +206. conflicts.at:183: ok +218. conflicts.at:551: testing parse.error=verbose and consistent errors: lr.type=ielr parse.lac=full ... +./conflicts.at:551: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: ./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y -Wnone,none -Werror --trace=none +stdout: + +133. output.at:338: ok +./conflicts.at:535: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + +./conflicts.at:564: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y + +221. conflicts.at:622: testing parse.error=verbose and consistent errors: ... +./conflicts.at:622: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:540: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +222. conflicts.at:626: testing parse.error=verbose and consistent errors: %glr-parser ... +stderr: +stdout: +223. conflicts.at:632: testing parse.error=verbose and consistent errors: lr.default-reduction=consistent ... +./conflicts.at:632: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +136. 
output.at:341: ok +./conflicts.at:626: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:546: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:558: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y +stderr: +./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y --warnings=none -Werror --trace=none +stderr: +stdout: +stdout: +135. output.at:340: 137. output.at:342: ok +./conflicts.at:551: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + ok + + + +./conflicts.at:564: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./conflicts.at:622: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +224. conflicts.at:638: testing parse.error=verbose and consistent errors: lr.default-reduction=accepting ... +./conflicts.at:638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +226. conflicts.at:647: testing parse.error=verbose and consistent errors: parse.lac=full ... +225. conflicts.at:642: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ... +./conflicts.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:632: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:626: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:647: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror -./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y --warnings=none -Werror --trace=none 207. conflicts.at:218: ok +./conflicts.at:638: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:642: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +227. conflicts.at:651: testing parse.error=verbose and consistent errors: parse.lac=full lr.default-reduction=accepting ... +./conflicts.at:651: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: input.y:4.10-15: error: rule useless in parser due to conflicts [-Werror=other] 4 | e: 'e' | %empty; @@ -6410,528 +6498,369 @@ stdout: ./conflicts.at:368: $PREPARSER ./input '0<0' stderr: -./conflicts.at:288: sed 's,.*/$,,' stderr 1>&2 ./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -213. conflicts.at:523: testing parse.error=verbose and consistent errors: lr.type=ielr java ... 
-./conflicts.at:523: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.java input.y ./conflicts.at:368: $PREPARSER ./input '0<0<0' +./conflicts.at:647: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:288: sed 's,.*/$,,' stderr 1>&2 stderr: syntax error, unexpected '<' -206. conflicts.at:183: ok ./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: ./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error ./conflicts.at:368: $PREPARSER ./input '0>0' +129. output.at:328: ok stderr: -./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:368: $PREPARSER ./input '0>0>0' -stderr: -syntax error, unexpected '>' -./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -213. conflicts.at:523: skipped (conflicts.at:523) -./conflicts.at:368: $PREPARSER ./input '0<0>0' stderr: -syntax error, unexpected '>' -./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none -214. conflicts.at:530: testing parse.error=verbose and consistent errors: lr.type=ielr lr.default-reduction=consistent ... - ./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:530: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.default-reduction=consistent -o input.c input.y -stderr: stdout: -215. conflicts.at:535: testing parse.error=verbose and consistent errors: lr.type=ielr lr.default-reduction=accepting ... -./conflicts.at:535: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none ./conflicts.at:509: $PREPARSER ./input stderr: +./conflicts.at:368: $PREPARSER ./input '0>0>0' syntax error, unexpected end of file ./conflicts.at:509: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:530: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:372: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -210. conflicts.at:509: ok - -208. conflicts.at:275: ./conflicts.at:535: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - ok -216. conflicts.at:540: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ... -./conflicts.at:540: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: -stdout: -./output.at:836: $PREPARSER ./parser +syntax error, unexpected '>' +./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +210. 
conflicts.at:509: ./conflicts.at:368: $PREPARSER ./input '0<0>0' + ok stderr: -./output.at:836: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -217. conflicts.at:546: testing parse.error=verbose and consistent errors: lr.type=canonical-lr parse.lac=full ... -./conflicts.at:546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -144. output.at:744: ok -./conflicts.at:540: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +syntax error, unexpected '>' +./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.default-reduction=consistent -o input.c input.y +./conflicts.at:651: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:546: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -218. conflicts.at:551: testing parse.error=verbose and consistent errors: lr.type=ielr parse.lac=full ... -./conflicts.at:551: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:551: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +228. conflicts.at:676: testing LAC: %nonassoc requires splitting canonical LR states ... +./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y +./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none +229. conflicts.at:764: testing Unresolved SR Conflicts ... +./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y +./conflicts.at:372: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: ./conflicts.at:530: $PREPARSER ./input stderr: syntax error, unexpected end of file, expecting 'a' or 'b' ./conflicts.at:530: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none +./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -Werror 214. 
conflicts.at:530: ok -stdout: -./conflicts.at:372: $PREPARSER ./input '0<0' -stderr: -./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:372: $PREPARSER ./input '0<0<0' -stderr: -syntax error, unexpected '<', expecting end of file -./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Werror +230. conflicts.at:887: testing Resolved SR Conflicts ... +./conflicts.at:898: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y stderr: -stderr: -stdout: -./conflicts.at:372: $PREPARSER ./input '0>0' stdout: -./conflicts.at:513: $PREPARSER ./input -stderr: -stderr: ./conflicts.at:535: $PREPARSER ./input -./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected end of file -./conflicts.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: syntax error, unexpected end of file, expecting 'a' or 'b' ./conflicts.at:535: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -219. conflicts.at:558: testing parse.error=verbose and consistent errors: c++ lr.type=canonical-lr parse.lac=full ... -./conflicts.at:372: $PREPARSER ./input '0>0>0' -./conflicts.at:558: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +208. conflicts.at:275: ok stderr: -syntax error, unexpected '>', expecting end of file -./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -211. conflicts.at:513: ok -215. conflicts.at:535: ok -./conflicts.at:372: $PREPARSER ./input '0<0>0' +input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples stderr: -syntax error, unexpected '>', expecting end of file -./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - +input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -220. conflicts.at:564: testing parse.error=verbose and consistent errors: c++ lr.type=ielr parse.lac=full ... -./conflicts.at:564: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:381: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -./conflicts.at:558: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./conflicts.at:774: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:726: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=error +215. 
conflicts.at:535: ok +./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=error stderr: stdout: ./conflicts.at:540: $PREPARSER ./input +231. conflicts.at:989: testing %precedence suffices ... +./conflicts.at:1006: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: syntax error, unexpected end of file, expecting 'a' or 'b' +./conflicts.at:901: cat input.output ./conflicts.at:540: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -221. conflicts.at:622: testing parse.error=verbose and consistent errors: ... -./conflicts.at:622: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -stdout: +230. conflicts.at:887: ok + 216. conflicts.at:540: ok -./conflicts.at:564: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./conflicts.at:546: $PREPARSER ./input -stderr: -syntax error, unexpected end of file, expecting 'b' -./conflicts.at:546: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -217. conflicts.at:546: ok -./conflicts.at:622: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -222. conflicts.at:626: testing parse.error=verbose and consistent errors: %glr-parser ... -./conflicts.at:626: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:381: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -223. conflicts.at:632: testing parse.error=verbose and consistent errors: lr.default-reduction=consistent ... -./conflicts.at:632: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:626: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:632: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +232. conflicts.at:1015: testing %precedence does not suffice ... +./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +233. conflicts.at:1096: testing Syntax error in consistent error state: yacc.c ... +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none +234. conflicts.at:1096: testing Syntax error in consistent error state: glr.c ... +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +231. conflicts.at:989: ok stderr: stdout: ./conflicts.at:551: $PREPARSER ./input stderr: +stderr: syntax error, unexpected end of file, expecting 'b' ./conflicts.at:551: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -218. conflicts.at:551: ok -224. conflicts.at:638: testing parse.error=verbose and consistent errors: lr.default-reduction=accepting ... 
-./conflicts.at:638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: +./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -Wnone,none -Werror --trace=none stdout: +218. conflicts.at:551: ok +./conflicts.at:546: $PREPARSER ./input stderr: -./conflicts.at:622: $PREPARSER ./input +syntax error, unexpected end of file, expecting 'b' +./conflicts.at:546: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +217. conflicts.at:546: ok +235. conflicts.at:1096: testing Syntax error in consistent error state: lalr1.cc ... +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror + +./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +stderr: +236. conflicts.at:1096: testing Syntax error in consistent error state: glr.cc ... stdout: +./conflicts.at:632: $PREPARSER ./input stderr: -./conflicts.at:381: $PREPARSER ./input '0<0' -syntax error, unexpected 'b' -./conflicts.at:622: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -221. conflicts.at:622: ok -./conflicts.at:638: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:381: $PREPARSER ./input '0<0<0' -stderr: -stderr: -syntax error, unexpected '<', expecting end of file -./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -stdout: -./conflicts.at:632: $PREPARSER ./input -stderr: +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y syntax error, unexpected 'b' ./conflicts.at:632: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +237. conflicts.at:1096: testing Syntax error in consistent error state: glr2.cc ... +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y 223. conflicts.at:632: ok -./conflicts.at:381: $PREPARSER ./input '0>0' -stderr: -./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -225. 
conflicts.at:642: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ... -./conflicts.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - -./conflicts.at:381: $PREPARSER ./input '0>0>0' -stderr: -syntax error, unexpected '>', expecting end of file -./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -226. conflicts.at:647: testing parse.error=verbose and consistent errors: parse.lac=full ... -./conflicts.at:647: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:381: $PREPARSER ./input '0<0>0' -stderr: -syntax error, unexpected '>', expecting end of file -./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.c input.y -./conflicts.at:642: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:647: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:388: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: +./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=none -Werror --trace=none stdout: -./conflicts.at:626: $PREPARSER ./input +./conflicts.at:622: $PREPARSER ./input stderr: syntax error, unexpected 'b' -stderr: -./conflicts.at:626: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./conflicts.at:518: $PREPARSER ./input -stderr: -222. conflicts.at:626: ok -syntax error, unexpected end of file -./conflicts.at:518: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -212. conflicts.at:518: ok - - -227. conflicts.at:651: testing parse.error=verbose and consistent errors: parse.lac=full lr.default-reduction=accepting ... -./conflicts.at:651: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -228. conflicts.at:676: testing LAC: %nonassoc requires splitting canonical LR states ... -./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -stderr: -stdout: -./conflicts.at:638: $PREPARSER ./input -./conflicts.at:651: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -Werror -stderr: -syntax error, unexpected end of file, expecting 'a' -./conflicts.at:638: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -224. 
conflicts.at:638: ok -stderr: -input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples - -./conflicts.at:726: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=error -stderr: -stdout: -./conflicts.at:642: $PREPARSER ./input -stderr: -syntax error, unexpected end of file, expecting 'a' -./conflicts.at:642: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -229. conflicts.at:764: testing Unresolved SR Conflicts ... -./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y -stderr: -stdout: -./conflicts.at:388: $PREPARSER ./input '0<0' -225. conflicts.at:642: ok -stderr: -./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:388: $PREPARSER ./input '0<0<0' -stderr: -syntax error, unexpected '<', expecting end of file -./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:622: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:388: $PREPARSER ./input '0>0' -./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -Wnone,none -Werror --trace=none -stderr: -./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:388: $PREPARSER ./input '0>0>0' -./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Werror -stderr: -syntax error, unexpected '>', expecting end of file -./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -230. conflicts.at:887: testing Resolved SR Conflicts ... -./conflicts.at:898: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y -./conflicts.at:388: $PREPARSER ./input '0<0>0' -stderr: -syntax error, unexpected '>', expecting end of file -./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +221. conflicts.at:622: ok +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror +./conflicts.at:780: cat input.output stderr: -input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples stdout: -./conflicts.at:647: $PREPARSER ./input -209. 
conflicts.at:301: ok -stderr: -syntax error, unexpected 'b' -./conflicts.at:647: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:901: cat input.output -./conflicts.at:774: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=none -Werror --trace=none - -230. conflicts.at:887: ok -226. conflicts.at:647: ok -./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=error -231. conflicts.at:989: testing %precedence suffices ... -./conflicts.at:1006: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - - -232. conflicts.at:1015: testing %precedence does not suffice ... -./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:731: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none -231. conflicts.at:989: ok -233. conflicts.at:1096: testing Syntax error in consistent error state: yacc.c ... -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -234. conflicts.at:1096: testing Syntax error in consistent error state: glr.c ... -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:780: cat input.output stderr: input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples input.y:12.3-18: error: rule useless in parser due to conflicts [-Werror=other] -229. conflicts.at:764: ok +./conflicts.at:638: $PREPARSER ./input stderr: input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] stderr: -stdout: +229. 
conflicts.at:764: ok +syntax error, unexpected end of file, expecting 'a' +./conflicts.at:638: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./conflicts.at:1033: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:651: $PREPARSER ./input ./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 +224. conflicts.at:638: ok +238. conflicts.at:1127: testing Defaulted Conflicted Reduction ... +./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y stderr: ./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error - -syntax error, unexpected end of file -./conflicts.at:651: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stdout: -./conflicts.at:564: $PREPARSER ./input ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -stderr: -227. conflicts.at:651: ok -syntax error, unexpected end of file, expecting 'b' -./conflicts.at:564: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +239. conflicts.at:1264: testing %expect not enough ... +./conflicts.at:1273: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -220. conflicts.at:564: ok -235. conflicts.at:1096: testing Syntax error in consistent error state: lalr1.cc ... -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +stderr: +./conflicts.at:642: $PREPARSER ./input -236. conflicts.at:1096: testing Syntax error in consistent error state: glr.cc ... -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: +syntax error, unexpected end of file, expecting 'a' +./conflicts.at:642: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +239. conflicts.at:1264: ok input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 -237. conflicts.at:1096: testing Syntax error in consistent error state: glr2.cc ... 
-./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./conflicts.at:731: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +225. conflicts.at:642: ok ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror +./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 +240. conflicts.at:1284: testing %expect right ... +./conflicts.at:1293: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +241. conflicts.at:1301: testing %expect too much ... stderr: +./conflicts.at:1310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror + +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error ./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 -stderr: -stdout: -./conflicts.at:732: $PREPARSER ./input -stderr: -syntax error, unexpected 'a', expecting 'b' -./conflicts.at:732: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +241. 
conflicts.at:1301: ok ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error -./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ - -o input.c input.y -232. conflicts.at:1015: ok -./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -238. conflicts.at:1127: testing Defaulted Conflicted Reduction ... -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y +./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Werror +242. conflicts.at:1321: testing %expect with reduce conflicts ... +./conflicts.at:1330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y + +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +242. conflicts.at:1321: ok +./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none stderr: + input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] +240. conflicts.at:1284: ok +243. conflicts.at:1341: testing %expect in grammar rule not enough ... +./conflicts.at:1350: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y +244. conflicts.at:1360: testing %expect in grammar rule right ... +243. 
conflicts.at:1341: ok +stderr: stderr: +./conflicts.at:1369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 +stdout: +./conflicts.at:647: $PREPARSER ./input input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] + +stderr: +stdout: +./conflicts.at:372: $PREPARSER ./input '0<0' +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +stderr: +syntax error, unexpected 'b' +./conflicts.at:647: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +245. conflicts.at:1377: testing %expect in grammar rules ... +./conflicts.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -rall input.y + ./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ - -o input.c input.y -Werror ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error -./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Werror -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none -stderr: -input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; 
export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none +./conflicts.at:372: $PREPARSER ./input '0<0<0' stderr: -stdout: -./conflicts.at:558: $PREPARSER ./input stderr: -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none -./conflicts.at:737: sed 's,.*/$,,' stderr 1>&2 +syntax error, unexpected '<', expecting end of file +246. conflicts.at:1396: testing %expect in grammar rule too much ... input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples input.y:4.6-8: error: rule useless in parser due to conflicts [-Werror=other] -stderr: -syntax error, unexpected end of file, expecting 'b' -./conflicts.at:558: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ - -o input.c input.y --warnings=error -219. conflicts.at:558: ok +./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +226. conflicts.at:647: ./conflicts.at:1405: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y + ok +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error ./conflicts.at:1138: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:1096: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +246. conflicts.at:1396: ok ./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=error -./conflicts.at:1096: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none -239. conflicts.at:1264: testing %expect not enough ... -./conflicts.at:1273: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -239. 
conflicts.at:1264: ok - -./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ - -o input.c input.y -Wnone,none -Werror --trace=none -./conflicts.at:1096: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none -240. conflicts.at:1284: testing %expect right ... -./conflicts.at:1293: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -stdout: -./conflicts.at:1096: $PREPARSER ./input -./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ - -o input.c input.y --warnings=none -Werror --trace=none +./conflicts.at:372: $PREPARSER ./input '0>0' stderr: -syntax error -./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none -233. conflicts.at:1096: ok -240. conflicts.at:1284: ok -./conflicts.at:742: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - -./conflicts.at:1145: cat input.output -238. conflicts.at:1127: ok - -241. conflicts.at:1301: testing %expect too much ... -./conflicts.at:1310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -241. conflicts.at:1301: ok -242. conflicts.at:1321: testing %expect with reduce conflicts ... -./conflicts.at:1330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -242. conflicts.at:1321: ok - - -244. conflicts.at:1360: testing %expect in grammar rule right ... -./conflicts.at:1369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -243. conflicts.at:1341: testing %expect in grammar rule not enough ... -./conflicts.at:1350: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y - -243. conflicts.at:1341: ok -244. conflicts.at:1360: ok -245. conflicts.at:1377: testing %expect in grammar rules ... -./conflicts.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -rall input.y - - -246. conflicts.at:1396: testing %expect in grammar rule too much ... 
-./conflicts.at:1405: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -245. conflicts.at:1377: ok -246. conflicts.at:1396: ok +./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 247. conflicts.at:1415: testing %expect-rr in grammar rule ... ./conflicts.at:1432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - - -247. conflicts.at:1415: ok +./conflicts.at:372: $PREPARSER ./input '0>0>0' stderr: -stdout: -./conflicts.at:743: $PREPARSER ./input +./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +syntax error, unexpected '>', expecting end of file +./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./conflicts.at:372: $PREPARSER ./input '0<0>0' +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none stderr: -syntax error, unexpected 'a', expecting 'b' or 'c' -./conflicts.at:743: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -249. conflicts.at:1469: testing %expect-rr not enough in grammar rule ... -./conflicts.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y +syntax error, unexpected '>', expecting end of file +./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +244. conflicts.at:1360: ok 248. conflicts.at:1440: testing %expect-rr too much in grammar rule ... +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none ./conflicts.at:1457: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -249. conflicts.at:1469: ok +./conflicts.at:381: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none +249. conflicts.at:1469: testing %expect-rr not enough in grammar rule ... 
+./conflicts.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y 248. conflicts.at:1440: ok +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none +249. conflicts.at:1469: ok +245. conflicts.at:1377: + ok +./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none +247. conflicts.at:1415: ok 250. conflicts.at:1498: testing %prec with user string ... ./conflicts.at:1507: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Werror -251. conflicts.at:1515: testing %no-default-prec without %prec ... -./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall -o input.c input.y +./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS 252. conflicts.at:1544: testing %no-default-prec with %prec ... ./conflicts.at:1560: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -250. conflicts.at:1498: ok +251. conflicts.at:1515: testing %no-default-prec without %prec ... +./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall -o input.c input.y +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none -stderr: -input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Werror +232. conflicts.at:1015: ok +./conflicts.at:1096: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS 253. conflicts.at:1568: testing %default-prec ... ./conflicts.at:1584: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -252. conflicts.at:1544: ok -./conflicts.at:748: sed 's,.*/$,,' stderr 1>&2 -253. 
conflicts.at:1568: ok -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=error - +./conflicts.at:381: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -./conflicts.at:1096: $PREPARSER ./input -stderr: -input.y: error: 4 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:1.1-5: error: useless precedence and associativity for '+' [-Werror=precedence] -input.y:2.1-5: error: useless precedence and associativity for '*' [-Werror=precedence] + +./conflicts.at:651: $PREPARSER ./input stderr: -syntax error +syntax error, unexpected end of file +./conflicts.at:651: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 254. conflicts.at:1592: testing Unreachable States After Conflict Resolution ... -./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none ./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all input.y +227. conflicts.at:651: ok +250. conflicts.at:1498: 252. conflicts.at:1544: ok + ok -./conflicts.at:1531: sed 's,.*/$,,' stderr 1>&2 -234. conflicts.at:1096: ok 255. conflicts.at:1855: testing Solved conflicts report for multiple reductions in a state ... 
./conflicts.at:1881: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all -o input.c input.y -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Wnone,none -Werror --trace=none -./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=error -./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y -Werror -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Wnone,none -Werror --trace=none +./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Werror + +253. conflicts.at:1568: ok 256. conflicts.at:1935: testing %nonassoc error actions for multiple reductions in a state ... ./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y +257. conflicts.at:2299: testing %expect-rr non GLR ... +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret 1.y +./conflicts.at:1145: cat input.output +258. conflicts.at:2331: testing -W versus %expect and %expect-rr ... +stderr: + +stdout: +./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret sr-rr.y +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none +./conflicts.at:513: $PREPARSER ./input +./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y -Werror +stderr: +238. conflicts.at:1127: ok +syntax error, unexpected end of file +./conflicts.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +211. conflicts.at:513: ok + +259. counterexample.at:43: testing Unifying S/R ... 
+./counterexample.at:55: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:1096: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS + +stderr: +input.y:7.5-7: warning: rule useless in parser due to conflicts [-Wother] +input.y:11.5-7: warning: rule useless in parser due to conflicts [-Wother] +input.y:17.11-26: warning: rule useless in parser due to conflicts [-Wother] +input.y:18.11-26: warning: rule useless in parser due to conflicts [-Wother] +input.y:19.11-26: warning: rule useless in parser due to conflicts [-Wother] +./conflicts.at:1882: cat input.output | sed -n '/^State 0$/,/^State 1$/p' +stderr: +input.y: error: 4 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:1.1-5: error: useless precedence and associativity for '+' [-Werror=precedence] +input.y:2.1-5: error: useless precedence and associativity for '*' [-Werror=precedence] +255. conflicts.at:1855: ok +./conflicts.at:1531: sed 's,.*/$,,' stderr 1>&2 +261. counterexample.at:144: testing S/R Conflict with Nullable Symbols ... +./counterexample.at:157: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y -Werror +260. counterexample.at:83: testing Deep Unifying S/R ... +./counterexample.at:95: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y + +./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=error stderr: input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] @@ -6943,23 +6872,112 @@ input.y:25.16: error: rule useless in parser due to conflicts [-Werror=other] input.y:31.5-7: error: rule useless in parser due to conflicts [-Werror=other] input.y:32.4: error: rule useless in parser due to conflicts [-Werror=other] -stderr: -input.y:7.5-7: warning: rule useless in parser due to conflicts [-Wother] -input.y:11.5-7: warning: rule useless in parser due to conflicts [-Wother] -input.y:17.11-26: warning: rule useless in parser due to conflicts [-Wother] -input.y:18.11-26: warning: rule useless in parser due to conflicts [-Wother] -input.y:19.11-26: warning: rule useless in parser due to conflicts [-Wother] -./conflicts.at:1882: cat input.output | sed -n '/^State 0$/,/^State 1$/p' +./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Werror +./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y -Werror ./conflicts.at:1638: sed 's,.*/$,,' stderr 1>&2 
./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y --warnings=error -./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=none -Werror --trace=none -255. conflicts.at:1855: ./conflicts.at:753: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - ok -./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Werror - -251. conflicts.at:1515: ok +./conflicts.at:1096: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example: A . B C + Shift derivation + s + `-> 2: y c + `-> 8: A . B `-> 4: C + Reduce derivation + s + `-> 1: a x + `-> 3: A . `-> 6: B C +input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:55: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +262. counterexample.at:207: testing Non-unifying Ambiguous S/R ... +./counterexample.at:220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./counterexample.at:55: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +stderr: +stdout: +./conflicts.at:732: $PREPARSER ./input +stderr: +stderr: +syntax error, unexpected 'a', expecting 'b' +./conflicts.at:732: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.y: error: %expect-rr applies only to GLR parsers [-Werror=other] +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example: A . B C + Shift derivation + s + `-> 1: ac + `-> 3: A ac C + `-> 4: b + `-> 5: . B + Reduce derivation + s + `-> 2: a bc + `-> 7: A . `-> 10: B C +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example: A A . B B C C + Shift derivation + s + `-> 1: ac + `-> 3: A ac C + `-> 3: A ac C + `-> 4: b + `-> 6: . b + `-> 5: B B + Reduce derivation + s + `-> 2: a bc + `-> 8: A a `-> 9: B bc C + `-> 7: A . 
`-> 10: B C +input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] +./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Wnone,none -Werror --trace=none +./counterexample.at:95: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ + -o input.c input.y +./conflicts.at:2307: sed 's,.*/$,,' stderr 1>&2 +./counterexample.at:95: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y stderr: +stderr: +input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example: A . B + Shift derivation + s + `-> 2: A xby + `-> 9: . B + Reduce derivation + s + `-> 1: ax by + `-> 3: A x `-> 6: B y + `-> 4: %empty . `-> 6: %empty +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + First example: A X . B Y $end + Shift derivation + $accept + `-> 0: s $end + `-> 2: A xby + `-> 10: X xby Y + `-> 9: . B + Second example: A X . B y $end + Reduce derivation + $accept + `-> 0: s $end + `-> 1: ax by + `-> 3: A x `-> 6: B y + `-> 5: X x + `-> 4: %empty . +input.y:5.4-9: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:157: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] +sr-rr.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] +sr-rr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y --warnings=error ./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y -Wnone,none -Werror --trace=none +./conflicts.at:2354: sed 's,.*/$,,' stderr 1>&2 +./counterexample.at:157: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +stderr: bison (GNU Bison) 3.8.2 init: 0.000000 # state items: 26 @@ -7238,85 +7256,8 @@ `-> 13: %empty . -257. conflicts.at:2299: testing %expect-rr non GLR ... 
-./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret 1.y -./conflicts.at:1959: sed 's,.*/$,,' stderr 1>&2 - -./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y --warnings=error -./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y --warnings=none -Werror --trace=none -258. conflicts.at:2331: testing -W versus %expect and %expect-rr ... -./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y -Werror -./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret sr-rr.y -./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Wnone,none -Werror --trace=none -./conflicts.at:1651: cat input.output -stderr: -1.y: error: %expect-rr applies only to GLR parsers [-Werror=other] -./conflicts.at:1836: cat input.y >> input-keep.y -./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input-keep.y -./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y -Werror -./conflicts.at:2307: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y --warnings=error -./conflicts.at:2239: cat input.output | sed -n '/^State 0$/,/^State 1$/p' -./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y -Werror -stderr: -sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] -sr-rr.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] -sr-rr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -256. 
conflicts.at:1935: ok -./conflicts.at:2354: sed 's,.*/$,,' stderr 1>&2 -stderr: -stdout: -./conflicts.at:754: $PREPARSER ./input -./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y -Wnone,none -Werror --trace=none - -stderr: -syntax error, unexpected 'a', expecting 'b' or 'c' -stderr: -./conflicts.at:754: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input-keep.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] -input-keep.y: error: 2 reduce/reduce conflicts [-Werror=conflicts-rr] -input-keep.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input-keep.y:22.4: error: rule useless in parser due to conflicts [-Werror=other] -input-keep.y:26.16: error: rule useless in parser due to conflicts [-Werror=other] -input-keep.y:32.5-7: error: rule useless in parser due to conflicts [-Werror=other] -input-keep.y:33.4: error: rule useless in parser due to conflicts [-Werror=other] -228. conflicts.at:676: ok -./conflicts.at:1838: sed 's,.*/$,,' stderr 1>&2 ./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y --warnings=error -259. counterexample.at:43: testing Unifying S/R ... -./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y --warnings=error -./counterexample.at:55: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y - -./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y --warnings=none -Werror --trace=none -260. counterexample.at:83: testing Deep Unifying S/R ... -./counterexample.at:95: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y -Wnone,none -Werror --trace=none -./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y -Wnone,none -Werror --trace=none -stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example: A . B C - Shift derivation - s - `-> 2: y c - `-> 8: A . B `-> 4: C - Reduce derivation - s - `-> 1: a x - `-> 3: A . 
`-> 6: B C -input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:55: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret 2.y -./counterexample.at:55: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -stderr: -stdout: -./conflicts.at:1096: $PREPARSER ./input -./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y --warnings=none -Werror --trace=none -stderr: -syntax error -./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -236. conflicts.at:1096: ok +./conflicts.at:1959: sed 's,.*/$,,' stderr 1>&2 stderr: input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] @@ -7325,54 +7266,31 @@ Reduce derivation s -> [ a -> [ A . ] x -> [ B C ] ] input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] ./counterexample.at:55: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -254. conflicts.at:1592: ok - stderr: input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example: A . B C - Shift derivation - s - `-> 1: ac - `-> 3: A ac C - `-> 4: b - `-> 5: . B - Reduce derivation - s - `-> 2: a bc - `-> 7: A . `-> 10: B C -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example: A A . B B C C +input.y: warning: shift/reduce conflict on token C [-Wcounterexamples] + First example: B . C $end Shift derivation - s - `-> 1: ac - `-> 3: A ac C - `-> 3: A ac C - `-> 4: b - `-> 6: . b - `-> 5: B B + $accept + `-> 0: g $end + `-> 2: x + `-> 6: bc + `-> 9: B . C + Second example: B . C D $end Reduce derivation - s - `-> 2: a bc - `-> 8: A a `-> 9: B bc C - `-> 7: A . `-> 10: B C + $accept + `-> 0: g $end + `-> 2: x + `-> 5: b cd + `-> 7: B . `-> 8: C D input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:95: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./counterexample.at:220: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=none -Werror --trace=none 259. 
counterexample.at:43: ok -./counterexample.at:95: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -stderr: -stdout: -./conflicts.at:1096: $PREPARSER ./input -./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y -Werror -stderr: -syntax error -./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y --warnings=none -Werror --trace=none -./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -261. counterexample.at:144: testing S/R Conflict with Nullable Symbols ... -./counterexample.at:157: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y - - -235. conflicts.at:1096: ok +./counterexample.at:220: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y --warnings=error +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ + -o input.c input.y -Werror stderr: input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] @@ -7385,78 +7303,20 @@ Reduce derivation s -> [ a -> [ A a -> [ A . ] ] bc -> [ B bc -> [ B C ] C ] ] input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] ./counterexample.at:95: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -262. counterexample.at:207: testing Non-unifying Ambiguous S/R ... -stderr: -263. counterexample.at:254: testing Non-unifying Unambiguous S/R ... -./counterexample.at:220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y - -260. 
counterexample.at:83: ok -./counterexample.at:265: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -2.y: error: %expect-rr applies only to GLR parsers [-Werror=other] -2.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] -2.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -2.y:3.12-14: error: rule useless in parser due to conflicts [-Werror=other] -./conflicts.at:2317: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y --warnings=error -./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -264. counterexample.at:298: testing S/R after first token ... +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y -Wnone,none -Werror --trace=none stderr: -./counterexample.at:314: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example: A . B - Shift derivation - s - `-> 2: A xby - `-> 9: . B - Reduce derivation - s - `-> 1: ax by - `-> 3: A x `-> 6: B y - `-> 4: %empty . `-> 6: %empty -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - First example: A X . B Y $end - Shift derivation - $accept - `-> 0: s $end - `-> 2: A xby - `-> 10: X xby Y - `-> 9: . B - Second example: A X . B y $end - Reduce derivation - $accept - `-> 0: s $end - `-> 1: ax by - `-> 3: A x `-> 6: B y - `-> 5: X x - `-> 4: %empty . -input.y:5.4-9: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:157: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./counterexample.at:157: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -265. counterexample.at:363: testing Unifying R/R counterexample ... -./counterexample.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y -Wnone,none -Werror --trace=none +260. counterexample.at:83: ok +stdout: +./conflicts.at:626: $PREPARSER ./input stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token C [-Wcounterexamples] - First example: B . C $end - Shift derivation - $accept - `-> 0: g $end - `-> 2: x - `-> 6: bc - `-> 9: B . C - Second example: B . C D $end - Reduce derivation - $accept - `-> 0: g $end - `-> 2: x - `-> 5: b cd - `-> 7: B . 
`-> 8: C D -input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:220: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./counterexample.at:220: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +syntax error, unexpected 'b' +./conflicts.at:626: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y --warnings=none -Werror --trace=none + +263. counterexample.at:254: testing Non-unifying Unambiguous S/R ... +./counterexample.at:265: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +222. conflicts.at:626: ok stderr: input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] @@ -7470,8 +7330,45 @@ Reduce derivation $accept -> [ s -> [ ax -> [ A x -> [ X x -> [ . ] ] ] by -> [ B y ] ] $end ] input.y:5.4-9: warning: rule useless in parser due to conflicts [-Wother] ./counterexample.at:157: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y -Wnone,none -Werror --trace=none +stderr: + +stdout: +261. counterexample.at:144: ok +264. counterexample.at:298: testing S/R after first token ... +./counterexample.at:314: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +251. conflicts.at:1515: ok +./output.at:835: $CXX $CPPFLAGS $CXXFLAGS -Iout/ $LDFLAGS -o parser out/x[12].o main.cc $LIBS + + +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token C [-Wcounterexamples] + First example B . C $end + Shift derivation $accept -> [ g -> [ x -> [ bc -> [ B . C ] ] ] $end ] + Second example B . C D $end + Reduce derivation $accept -> [ g -> [ x -> [ b -> [ B . ] cd -> [ C D ] ] ] $end ] +input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:220: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y --warnings=none -Werror --trace=none +stderr: +input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +265. counterexample.at:363: testing Unifying R/R counterexample ... 
+./counterexample.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Wnone,none -Werror --trace=none +262. counterexample.at:207: ok +./conflicts.at:737: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ + -o input.c input.y --warnings=error +./conflicts.at:1651: cat input.output +266. counterexample.at:399: testing Non-unifying R/R LR(1) conflict ... +./counterexample.at:409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y + +267. counterexample.at:441: testing Non-unifying R/R LR(2) conflict ... stderr: stderr: +stdout: input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] First example: A . A B $end @@ -7489,7 +7386,45 @@ `-> 1: t `-> 3: x `-> 3: x `-> 5: A `-> 5: A . +./conflicts.at:381: $PREPARSER ./input '0<0' ./counterexample.at:265: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +stderr: +./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y --warnings=none -Werror --trace=none +./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./counterexample.at:265: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:1836: cat input.y >> input-keep.y +./conflicts.at:381: $PREPARSER ./input '0<0<0' +./counterexample.at:451: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input-keep.y +stderr: +syntax error, unexpected '<', expecting end of file +./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +268. counterexample.at:488: testing Cex Search Prepend ... +./counterexample.at:499: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +stderr: +stdout: +./conflicts.at:381: $PREPARSER ./input '0>0' +./conflicts.at:1096: $PREPARSER ./input +stderr: +syntax error +stderr: +./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret 2.y +./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] + Example: A b . 
+ First reduce derivation + a + `-> 1: A b . + Second reduce derivation + a + `-> 1: A b + `-> 3: b . +input.y:4.9: warning: rule useless in parser due to conflicts [-Wother] +./conflicts.at:381: $PREPARSER ./input '0>0>0' input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] Example: b . A X X Y @@ -7518,36 +7453,42 @@ input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] input.y:8.4: warning: rule useless in parser due to conflicts [-Wother] ./counterexample.at:314: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./counterexample.at:372: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +233. conflicts.at:1096: ok stderr: -261. counterexample.at:144: ok -input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] - Example: A b . +syntax error, unexpected '>', expecting end of file +./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./counterexample.at:372: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./counterexample.at:314: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +stderr: +./conflicts.at:381: $PREPARSER ./input '0<0>0' +input.y: warning: 2 reduce/reduce conflicts [-Wconflicts-rr] +input.y: warning: reduce/reduce conflict on tokens A, C [-Wcounterexamples] + First example: D . A $end First reduce derivation - a - `-> 1: A b . + $accept + `-> 0: s $end + `-> 1: a A + `-> 5: D . + Second example: B D . A $end Second reduce derivation - a - `-> 1: A b - `-> 3: b . -input.y:4.9: warning: rule useless in parser due to conflicts [-Wother] -./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y --warnings=none -Werror --trace=none -./counterexample.at:372: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -Werror -./counterexample.at:265: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./counterexample.at:314: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./counterexample.at:372: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y - + $accept + `-> 0: s $end + `-> 4: B b A + `-> 6: D . 
+input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] +./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y --warnings=none -Werror --trace=none +./counterexample.at:409: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token C [-Wcounterexamples] - First example B . C $end - Shift derivation $accept -> [ g -> [ x -> [ bc -> [ B . C ] ] ] $end ] - Second example B . C D $end - Reduce derivation $accept -> [ g -> [ x -> [ b -> [ B . ] cd -> [ C D ] ] ] $end ] -input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] + +syntax error, unexpected '>', expecting end of file +./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./counterexample.at:409: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.c input.y stderr: -./counterexample.at:220: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ + -o input.c input.y -Wnone,none -Werror --trace=none input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] First example A . A B $end @@ -7556,22 +7497,53 @@ Reduce derivation $accept -> [ s -> [ s -> [ t -> [ x -> [ A . ] ] ] t -> [ x -> [ A ] ] ] $end ] ./counterexample.at:265: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr 263. counterexample.at:254: ok -266. counterexample.at:399: testing Non-unifying R/R LR(1) conflict ... -262. counterexample.at:207: ok -./counterexample.at:409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +269. counterexample.at:550: testing R/R cex with prec ... +./counterexample.at:562: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y -Werror +stderr: stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example: N A . B C + Shift derivation + s + `-> 1: n + `-> 6: N b + `-> 8: A . B C + Reduce derivation + s + `-> 2: n C + `-> 5: N a B + `-> 7: A . +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example: N N A . 
B D C + Shift derivation + s + `-> 1: n + `-> 4: N n C + `-> 6: N b + `-> 9: A . B D + Reduce derivation + s + `-> 2: n C + `-> 3: N n D + `-> 5: N a B + `-> 7: A . +input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] Example A b . First reduce derivation a -> [ A b . ] Second reduce derivation a -> [ A b -> [ b . ] ] input.y:4.9: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:499: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr ./counterexample.at:372: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y -Werror +./counterexample.at:499: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +265. counterexample.at:363: ok + stderr: -stderr: -sr-rr.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] -sr-rr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] Example b . A X X Y @@ -7585,43 +7557,9 @@ input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] input.y:8.4: warning: rule useless in parser due to conflicts [-Wother] ./counterexample.at:314: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -265. counterexample.at:363: ok -./conflicts.at:2359: sed 's,.*/$,,' stderr 1>&2 -257. conflicts.at:2299: ok -264. counterexample.at:298: ok - - - -267. counterexample.at:441: testing Non-unifying R/R LR(2) conflict ... -./counterexample.at:451: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y --warnings=error - -269. counterexample.at:550: testing R/R cex with prec ... -./counterexample.at:562: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -268. counterexample.at:488: testing Cex Search Prepend ... -stderr: -input.y: warning: 2 reduce/reduce conflicts [-Wconflicts-rr] -input.y: warning: reduce/reduce conflict on tokens A, C [-Wcounterexamples] - First example: D . A $end - First reduce derivation - $accept - `-> 0: s $end - `-> 1: a A - `-> 5: D . - Second example: B D . A $end - Second reduce derivation - $accept - `-> 0: s $end - `-> 4: B b A - `-> 6: D . -input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:409: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -271. counterexample.at:797: testing Non-unifying Prefix Share ... 
-./counterexample.at:810: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./counterexample.at:499: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./counterexample.at:409: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y 270. counterexample.at:610: testing Null nonterminals ... ./counterexample.at:621: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:2239: cat input.output | sed -n '/^State 0$/,/^State 1$/p' stderr: input.y: warning: 2 reduce/reduce conflicts [-Wconflicts-rr] input.y: warning: reduce/reduce conflict on tokens A, C [-Wcounterexamples] @@ -7631,8 +7569,48 @@ Second reduce derivation $accept -> [ s -> [ B b -> [ D . ] A ] $end ] input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] ./counterexample.at:409: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -Wnone,none -Werror --trace=none +./conflicts.at:388: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +264. counterexample.at:298: 256. conflicts.at:1935: ok + ok +266. counterexample.at:399: ok +271. counterexample.at:797: testing Non-unifying Prefix Share ... +./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -Werror +./counterexample.at:810: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y + + +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ + -o input.c input.y --warnings=none -Werror --trace=none +stderr: +input-keep.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] +input-keep.y: error: 2 reduce/reduce conflicts [-Werror=conflicts-rr] +input-keep.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input-keep.y:22.4: error: rule useless in parser due to conflicts [-Werror=other] +input-keep.y:26.16: error: rule useless in parser due to conflicts [-Werror=other] +input-keep.y:32.5-7: error: rule useless in parser due to conflicts [-Werror=other] +input-keep.y:33.4: error: rule useless in parser due to conflicts [-Werror=other] +stderr: + +2.y: error: %expect-rr applies only to GLR parsers [-Werror=other] +2.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] +2.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +2.y:3.12-14: error: rule useless in parser due to conflicts [-Werror=other] +./conflicts.at:2317: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:1838: sed 's,.*/$,,' stderr 1>&2 stderr: +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: 
shift/reduce conflict on token B [-Wcounterexamples] + Example N A . B C + Shift derivation s -> [ n -> [ N b -> [ A . B C ] ] ] + Reduce derivation s -> [ n -> [ N a -> [ A . ] B ] C ] +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example N N A . B D C + Shift derivation s -> [ n -> [ N n -> [ N b -> [ A . B D ] ] C ] ] + Reduce derivation s -> [ n -> [ N n -> [ N a -> [ A . ] B ] D ] C ] +input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:499: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +272. counterexample.at:842: testing Deep Null Unifying ... +./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y --warnings=error input.y: warning: 4 reduce/reduce conflicts [-Wconflicts-rr] input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] Example: B . b c @@ -7662,42 +7640,18 @@ `-> 4: C `-> 6: %empty `-> 7: A c A `-> 5: %empty . `-> 7: %empty +./counterexample.at:854: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y ./counterexample.at:562: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -266. counterexample.at:399: ok +273. counterexample.at:884: testing Deep Null Non-unifying ... +./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y --warnings=error +./counterexample.at:896: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y ./counterexample.at:562: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example: N A . B C - Shift derivation - s - `-> 1: n - `-> 6: N b - `-> 8: A . B C - Reduce derivation - s - `-> 2: n C - `-> 5: N a B - `-> 7: A . -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example: N N A . B D C - Shift derivation - s - `-> 1: n - `-> 4: N n C - `-> 6: N b - `-> 9: A . B D - Reduce derivation - s - `-> 2: n C - `-> 3: N n D - `-> 5: N a B - `-> 7: A . -input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:499: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./counterexample.at:499: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +274. synclines.at:194: testing Prologue syncline ... +./synclines.at:194: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +268. counterexample.at:488: ok stderr: +stderr: input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] Example: H i J . J J @@ -7711,37 +7665,18 @@ `-> 3: H i J J `-> 5: i J . 
input.y:5.13-15: warning: rule useless in parser due to conflicts [-Wother] +sr-rr.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] +sr-rr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples ./counterexample.at:810: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y --warnings=none -Werror --trace=none +./conflicts.at:2359: sed 's,.*/$,,' stderr 1>&2 ./counterexample.at:810: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -272. counterexample.at:842: testing Deep Null Unifying ... -./counterexample.at:854: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example N A . B C - Shift derivation s -> [ n -> [ N b -> [ A . B C ] ] ] - Reduce derivation s -> [ n -> [ N a -> [ A . ] B ] C ] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example N N A . B D C - Shift derivation s -> [ n -> [ N n -> [ N b -> [ A . B D ] ] C ] ] - Reduce derivation s -> [ n -> [ N n -> [ N a -> [ A . ] B ] D ] C ] -input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:499: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -stderr: -input.y: warning: 4 reduce/reduce conflicts [-Wconflicts-rr] -input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] - Example B . b c - First reduce derivation S -> [ B -> [ A -> [ B . ] b A -> [ ] ] C -> [ A -> [ ] c A -> [ ] ] ] - Second reduce derivation S -> [ B C -> [ A -> [ B -> [ A -> [ . ] b A -> [ ] ] ] c A -> [ ] ] ] -input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] - Example C . c b - First reduce derivation S -> [ C -> [ A -> [ C . ] c A -> [ ] ] B -> [ A -> [ ] b A -> [ ] ] ] - Second reduce derivation S -> [ C B -> [ A -> [ C -> [ A -> [ . ] c A -> [ ] ] ] b A -> [ ] ] ] -./counterexample.at:562: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -269. counterexample.at:550: ok -268. counterexample.at:488: ok +275. synclines.at:214: testing %union syncline ... 
+./synclines.at:214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:742: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y --warnings=error +./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y -Wnone,none -Werror --trace=none stderr: +./synclines.at:194: $CC $CFLAGS $CPPFLAGS -c syncline.c input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] Example: A a . D @@ -7755,37 +7690,28 @@ `-> 3: b `-> 6: D `-> 4: c `-> 5: %empty . - - -./counterexample.at:854: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -./counterexample.at:854: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] - Example H i J . J J - Shift derivation s -> [ a -> [ H i J . J ] J ] - Reduce derivation s -> [ a -> [ H i -> [ i J . ] J J ] ] -input.y:5.13-15: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:810: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -274. synclines.at:194: testing Prologue syncline ... -273. counterexample.at:884: testing Deep Null Non-unifying ... -./counterexample.at:896: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -271. counterexample.at:797: ok -./synclines.at:194: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - stderr: +./counterexample.at:854: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] - Example A a . D - Shift derivation s -> [ A a d -> [ . D ] ] - Reduce derivation s -> [ A a a -> [ b -> [ c -> [ . ] ] ] d -> [ D ] ] -./counterexample.at:854: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -275. synclines.at:214: testing %union syncline ... -./synclines.at:214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -272. counterexample.at:842: ok -./synclines.at:194: $CC $CFLAGS $CPPFLAGS -c syncline.c -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Werror + First example: A a . D $end + Shift derivation + $accept + `-> 0: s $end + `-> 1: A a d + `-> 6: . 
D + Second example: A a . D E $end + Reduce derivation + $accept + `-> 0: s $end + `-> 2: A a a d E + `-> 3: b `-> 6: D + `-> 4: c + `-> 5: %empty . +./counterexample.at:896: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y -Wnone,none -Werror --trace=none +./counterexample.at:896: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./counterexample.at:854: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y stderr: syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" @@ -7818,32 +7744,34 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF - stdout: syncline.c:4: #error "4" -stderr: ./synclines.at:194: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 ./synclines.at:194: $CC $CFLAGS $CPPFLAGS -c input.c -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] - First example: A a . D $end - Shift derivation - $accept - `-> 0: s $end - `-> 1: A a d - `-> 6: . D - Second example: A a . D E $end - Reduce derivation - $accept - `-> 0: s $end - `-> 2: A a a d E - `-> 3: b `-> 6: D - `-> 4: c - `-> 5: %empty . -./counterexample.at:896: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./counterexample.at:896: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +stderr: +input.y: warning: 4 reduce/reduce conflicts [-Wconflicts-rr] +input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] + Example B . b c + First reduce derivation S -> [ B -> [ A -> [ B . ] b A -> [ ] ] C -> [ A -> [ ] c A -> [ ] ] ] + Second reduce derivation S -> [ B C -> [ A -> [ B -> [ A -> [ . ] b A -> [ ] ] ] c A -> [ ] ] ] +input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] + Example C . c b + First reduce derivation S -> [ C -> [ A -> [ C . ] c A -> [ ] ] B -> [ A -> [ ] b A -> [ ] ] ] + Second reduce derivation S -> [ C B -> [ A -> [ C -> [ A -> [ . ] c A -> [ ] ] ] b A -> [ ] ] ] +./counterexample.at:562: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +269. counterexample.at:550: ok ./synclines.at:214: $CC $CFLAGS $CPPFLAGS -c syncline.c -276. synclines.at:237: testing %union name syncline ... +./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y --warnings=none -Werror --trace=none +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] + Example H i J . J J + Shift derivation s -> [ a -> [ H i J . J ] J ] + Reduce derivation s -> [ a -> [ H i -> [ i J . 
] J J ] ] +input.y:5.13-15: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:810: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -Wnone,none -Werror --trace=none + stderr: input.y:2:2: error: #error "2" 2 | #error "2" @@ -7875,13 +7803,12 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -./synclines.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: -syncline.c: In function 'foo': +271. counterexample.at:797: syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" 4 | #error "4" | ^~~~~ -stdout: + ok ./synclines.at:214: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -7909,18 +7836,42 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF +stdout: +./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y --warnings=none -Werror --trace=none input.y:2: #error "2" stdout: -stderr: ./synclines.at:194: cat stdout syncline.c:4: #error "4" ./synclines.at:214: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] -./synclines.at:214: $CC $CFLAGS $CPPFLAGS -c input.c -./conflicts.at:2363: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=error +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] + Example A a . D + Shift derivation s -> [ A a d -> [ . D ] ] + Reduce derivation s -> [ A a a -> [ b -> [ c -> [ . ] ] ] d -> [ D ] ] + +./counterexample.at:854: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +stderr: 274. synclines.at:194: ok +./synclines.at:214: $CC $CFLAGS $CPPFLAGS -c input.c +276. synclines.at:237: testing %union name syncline ... +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] + First example A a . D $end + Shift derivation $accept -> [ s -> [ A a d -> [ . D ] ] $end ] + Second example A a . D E $end + Reduce derivation $accept -> [ s -> [ A a a -> [ b -> [ c -> [ . ] ] ] d -> [ D ] E ] $end ] +./counterexample.at:896: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./synclines.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +273. counterexample.at:884: ok +272. counterexample.at:842: ok + +277. synclines.at:264: testing Postprologue syncline ... +./synclines.at:264: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + + stderr: +257. 
conflicts.at:2299: ok input.y:2:2: error: #error "2" 2 | #error "2" | ^~~~~ @@ -7951,21 +7902,26 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] - First example A a . D $end - Shift derivation $accept -> [ s -> [ A a d -> [ . D ] ] $end ] - Second example A a . D E $end - Reduce derivation $accept -> [ s -> [ A a a -> [ b -> [ c -> [ . ] ] ] d -> [ D ] E ] $end ] -./synclines.at:254: $CC $CFLAGS $CPPFLAGS -c syncline.c -./counterexample.at:896: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr - -273. counterexample.at:884: ok stdout: input.y:2: #error "2" ./synclines.at:214: cat stdout +278. synclines.at:291: testing Action syncline ... +./synclines.at:291: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y --warnings=none -Werror --trace=none + +280. synclines.at:327: testing %code top syncline ... +./synclines.at:327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +275. synclines.at:214: ok +279. synclines.at:310: testing Epilogue syncline ... +./synclines.at:310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./synclines.at:254: $CC $CFLAGS $CPPFLAGS -c syncline.c +254. conflicts.at:1592: + ok +281. synclines.at:346: testing %destructor syncline ... +./synclines.at:346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + stderr: +./synclines.at:264: $CC $CFLAGS $CPPFLAGS -c syncline.c syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" 4 | #error "4" @@ -7997,18 +7953,161 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF - stdout: syncline.c:4: #error "4" -275. synclines.at:214: ./synclines.at:254: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 - ok +./synclines.at:254: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 ./synclines.at:254: $CC $CFLAGS $CPPFLAGS -c input.c -277. synclines.at:264: testing Postprologue syncline ... -./synclines.at:264: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -278. synclines.at:291: testing Action syncline ... -./synclines.at:291: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./synclines.at:291: $CC $CFLAGS $CPPFLAGS -c syncline.c +283. synclines.at:440: testing syncline escapes: yacc.c ... +282. synclines.at:370: testing %printer syncline ... 
+./synclines.at:370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +./synclines.at:440: $CC $CFLAGS $CPPFLAGS \"\\\"\".c -o \"\\\"\" || exit 77 +./synclines.at:327: $CC $CFLAGS $CPPFLAGS -c syncline.c +syncline.c: In function 'foo': +syncline.c:4:2: error: #error "4" + 4 | #error "4" + | ^~~~~ +./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c syncline.c +./synclines.at:264: "$PERL" -p -0777 - stderr <<\EOF || exit 77 + # Remove left-hand margin. + s/^[\d ]{6}\| //gm; -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Wnone,none -Werror --trace=none + # 1. Remove useless lines. + + # distcc clutter. + s/^distcc\[\d+\] .*\n//gm; + # c vs. c++. + s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; + # Function context. + s/^[^:]*: In function '[^']+':\n//gm; + # Caret error (with possible '~' to underline). + s/^ *#error.*\n *\^~*\n//gm; + # Number of errors. + s/^1 error generated\.\n//gm; + + # 2. Normalize the lines we kept. + + # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). + s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; + # Remove column. + s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; + # Map all combinations of "error: " and "#error: " to "#error ". + s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; +EOF + +./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y +stdout: +syncline.c:4: #error "4" +stderr: +./synclines.at:264: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +syncline.c: In function 'foo': +syncline.c:4:2: error: #error "4" + 4 | #error "4" + | ^~~~~ +./synclines.at:291: "$PERL" -p -0777 - stderr <<\EOF || exit 77 + # Remove left-hand margin. + s/^[\d ]{6}\| //gm; + + # 1. Remove useless lines. + + # distcc clutter. + s/^distcc\[\d+\] .*\n//gm; + # c vs. c++. + s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; + # Function context. + s/^[^:]*: In function '[^']+':\n//gm; + # Caret error (with possible '~' to underline). + s/^ *#error.*\n *\^~*\n//gm; + # Number of errors. + s/^1 error generated\.\n//gm; + + # 2. Normalize the lines we kept. + + # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). + s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; + # Remove column. + s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; + # Map all combinations of "error: " and "#error: " to "#error ". + s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; +EOF + +./synclines.at:264: $CC $CFLAGS $CPPFLAGS -c input.c +stdout: +stderr: +syncline.c: In function 'foo': +syncline.c:4:2: error: #error "4" + 4 | #error "4" + | ^~~~~ +./synclines.at:310: "$PERL" -p -0777 - stderr <<\EOF || exit 77 + # Remove left-hand margin. + s/^[\d ]{6}\| //gm; + + # 1. Remove useless lines. + + # distcc clutter. + s/^distcc\[\d+\] .*\n//gm; + # c vs. c++. + s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; + # Function context. + s/^[^:]*: In function '[^']+':\n//gm; + # Caret error (with possible '~' to underline). + s/^ *#error.*\n *\^~*\n//gm; + # Number of errors. + s/^1 error generated\.\n//gm; + + # 2. Normalize the lines we kept. + + # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). 
+ s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; + # Remove column. + s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; + # Map all combinations of "error: " and "#error: " to "#error ". + s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; +EOF + +syncline.c:4: #error "4" +stderr: +./synclines.at:291: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +./synclines.at:346: $CC $CFLAGS $CPPFLAGS -c syncline.c +./synclines.at:291: $CC $CFLAGS $CPPFLAGS -c input.c +syncline.c: In function 'foo': +syncline.c:4:2: error: #error "4" + 4 | #error "4" + | ^~~~~ +stdout: +./synclines.at:327: "$PERL" -p -0777 - stderr <<\EOF || exit 77 + # Remove left-hand margin. + s/^[\d ]{6}\| //gm; + + # 1. Remove useless lines. + + # distcc clutter. + s/^distcc\[\d+\] .*\n//gm; + # c vs. c++. + s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; + # Function context. + s/^[^:]*: In function '[^']+':\n//gm; + # Caret error (with possible '~' to underline). + s/^ *#error.*\n *\^~*\n//gm; + # Number of errors. + s/^1 error generated\.\n//gm; + + # 2. Normalize the lines we kept. + + # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). + s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; + # Remove column. + s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; + # Map all combinations of "error: " and "#error: " to "#error ". + s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; +EOF + +syncline.c:4: #error "4" +./synclines.at:310: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c input.c +stdout: +syncline.c:4: #error "4" stderr: input.y:1:7: error: expected '{' before 'break' 1 | %union break @@ -8167,7 +8266,10 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF +./synclines.at:327: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 stdout: +./synclines.at:327: $CC $CFLAGS $CPPFLAGS -c input.c +stderr: input.y:1: #error expected '{' before 'break' %union break ^~~~~ @@ -8296,59 +8398,12 @@ yydestruct ("Error: discarding", ^~~~~~~~~~ ./synclines.at:255: grep '^input.y:1' stdout -stdout: -input.y:1: #error expected '{' before 'break' -input.y:1: #error expected '{' before 'break' -279. synclines.at:310: testing Epilogue syncline ... -276. synclines.at:237: ok -./synclines.at:264: $CC $CFLAGS $CPPFLAGS -c syncline.c -./synclines.at:310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - -./synclines.at:291: $CC $CFLAGS $CPPFLAGS -c syncline.c -stderr: -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=none -Werror --trace=none syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" 4 | #error "4" | ^~~~~ -./synclines.at:264: "$PERL" -p -0777 - stderr <<\EOF || exit 77 - # Remove left-hand margin. - s/^[\d ]{6}\| //gm; - - # 1. Remove useless lines. - - # distcc clutter. - s/^distcc\[\d+\] .*\n//gm; - # c vs. c++. - s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; - # Function context. - s/^[^:]*: In function '[^']+':\n//gm; - # Caret error (with possible '~' to underline). - s/^ *#error.*\n *\^~*\n//gm; - # Number of errors. - s/^1 error generated\.\n//gm; - - # 2. Normalize the lines we kept. - - # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). 
- s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; - # Remove column. - s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; - # Map all combinations of "error: " and "#error: " to "#error ". - s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; -EOF - -280. synclines.at:327: testing %code top syncline ... -./synclines.at:327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stdout: -syncline.c:4: #error "4" -./synclines.at:264: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 stderr: -syncline.c: In function 'foo': -syncline.c:4:2: error: #error "4" - 4 | #error "4" - | ^~~~~ -./synclines.at:291: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +./synclines.at:346: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8375,12 +8430,6 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -./synclines.at:264: $CC $CFLAGS $CPPFLAGS -c input.c -stdout: -syncline.c:4: #error "4" -./synclines.at:291: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -./synclines.at:291: $CC $CFLAGS $CPPFLAGS -c input.c -stderr: input.y:13:2: error: #error "13" 13 | #error "13" | ^~~~~ @@ -8411,11 +8460,19 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c syncline.c stdout: +input.y:1: #error expected '{' before 'break' +input.y:1: #error expected '{' before 'break' +stdout: +276. synclines.at:237: ok +stdout: +syncline.c:4: #error "4" +./synclines.at:346: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 input.y:13: #error "13" ./synclines.at:264: cat stdout +./synclines.at:370: $CC $CFLAGS $CPPFLAGS -c syncline.c stderr: +./synclines.at:346: $CC $CFLAGS $CPPFLAGS -c input.c input.y: In function 'yyparse': input.y:8:2: error: #error "8" 8 | #error "8" @@ -8447,54 +8504,8 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -277. synclines.at:264: ok -stderr: -syncline.c: In function 'foo': -syncline.c:4:2: error: #error "4" - 4 | #error "4" - | ^~~~~ -./synclines.at:310: "$PERL" -p -0777 - stderr <<\EOF || exit 77 - # Remove left-hand margin. - s/^[\d ]{6}\| //gm; - - # 1. Remove useless lines. - - # distcc clutter. - s/^distcc\[\d+\] .*\n//gm; - # c vs. c++. - s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; - # Function context. - s/^[^:]*: In function '[^']+':\n//gm; - # Caret error (with possible '~' to underline). - s/^ *#error.*\n *\^~*\n//gm; - # Number of errors. - s/^1 error generated\.\n//gm; - - # 2. Normalize the lines we kept. - - # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). - s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; - # Remove column. - s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; - # Map all combinations of "error: " and "#error: " to "#error ". 
- s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; -EOF - -stdout: -input.y:8: #error "8" -./synclines.at:291: cat stdout -stdout: -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -syncline.c:4: #error "4" -./synclines.at:310: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c input.c -278. synclines.at:291: ok - -./synclines.at:327: $CC $CFLAGS $CPPFLAGS -c syncline.c -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: +277. synclines.at:264: ok input.y:8:2: error: #error "8" 8 | #error "8" | ^~~~~ @@ -8525,60 +8536,15 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -stderr: - -281. synclines.at:346: testing %destructor syncline ... -stdout: -./synclines.at:346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -syncline.c: In function 'foo': -syncline.c:4:2: error: #error "4" - 4 | #error "4" - | ^~~~~ -./synclines.at:327: "$PERL" -p -0777 - stderr <<\EOF || exit 77 - # Remove left-hand margin. - s/^[\d ]{6}\| //gm; - - # 1. Remove useless lines. - - # distcc clutter. - s/^distcc\[\d+\] .*\n//gm; - # c vs. c++. - s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; - # Function context. - s/^[^:]*: In function '[^']+':\n//gm; - # Caret error (with possible '~' to underline). - s/^ *#error.*\n *\^~*\n//gm; - # Number of errors. - s/^1 error generated\.\n//gm; - - # 2. Normalize the lines we kept. - - # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). - s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; - # Remove column. - s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; - # Map all combinations of "error: " and "#error: " to "#error ". - s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; -EOF - -input.y:8: #error "8" -./synclines.at:310: cat stdout -stdout: -syncline.c:4: #error "4" -./synclines.at:327: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -./synclines.at:327: $CC $CFLAGS $CPPFLAGS -c input.c -282. synclines.at:370: testing %printer syncline ... -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./synclines.at:370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -279. 
synclines.at:310: ok -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: +stdout: input.y:2:2: error: #error "2" 2 | #error "2" | ^~~~~ -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +stdout: +input.y:8: #error "8" +./synclines.at:291: cat stdout ./synclines.at:327: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8606,24 +8572,15 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -stdout: -input.y:2: #error "2" -./synclines.at:327: cat stdout -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -280. synclines.at:327: ok -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -283. synclines.at:440: testing syncline escapes: yacc.c ... -./synclines.at:440: $CC $CFLAGS $CPPFLAGS \"\\\"\".c -o \"\\\"\" || exit 77 -./synclines.at:346: $CC $CFLAGS $CPPFLAGS -c syncline.c -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./synclines.at:370: $CC $CFLAGS $CPPFLAGS -c syncline.c +input.y:8: #error "8" +./synclines.at:310: cat stdout stderr: -syncline.c: In function 'foo': +278. synclines.at:291: syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" 4 | #error "4" | ^~~~~ -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./synclines.at:346: "$PERL" -p -0777 - stderr <<\EOF || exit 77 + ok +./synclines.at:370: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. 
s/^[\d ]{6}\| //gm; @@ -8652,55 +8609,16 @@ stdout: -syncline.c:4: #error "4" -./synclines.at:346: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -./synclines.at:346: $CC $CFLAGS $CPPFLAGS -c input.c +input.y:2: #error "2" stderr: -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -syncline.c: In function 'foo': -syncline.c:4:2: error: #error "4" - 4 | #error "4" - | ^~~~~ -./synclines.at:370: "$PERL" -p -0777 - stderr <<\EOF || exit 77 - # Remove left-hand margin. - s/^[\d ]{6}\| //gm; - - # 1. Remove useless lines. - - # distcc clutter. - s/^distcc\[\d+\] .*\n//gm; - # c vs. c++. - s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; - # Function context. - s/^[^:]*: In function '[^']+':\n//gm; - # Caret error (with possible '~' to underline). - s/^ *#error.*\n *\^~*\n//gm; - # Number of errors. - s/^1 error generated\.\n//gm; - - # 2. Normalize the lines we kept. - - # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). - s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; - # Remove column. - s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; - # Map all combinations of "error: " and "#error: " to "#error ". - s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; -EOF - -284. synclines.at:440: testing syncline escapes: glr.c ... -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./synclines.at:327: cat stdout +./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Werror stdout: syncline.c:4: #error "4" -./synclines.at:370: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./synclines.at:440: $CC $CFLAGS $CPPFLAGS \"\\\"\".c -o \"\\\"\" || exit 77 -stderr: stdout: -./synclines.at:370: $CC $CFLAGS $CPPFLAGS -c input.c +./synclines.at:370: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 stderr: -./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".c \"\\\"\".y +279. synclines.at:310: ok input.y: In function 'yydestruct': input.y:2:2: error: #error "2" 2 | #error "2" @@ -8732,19 +8650,33 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF +280. 
synclines.at:327: ok + +./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".c \"\\\"\".y stdout: -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./synclines.at:370: $CC $CFLAGS $CPPFLAGS -c input.c input.y:2: #error "2" ./synclines.at:346: cat stdout +284. synclines.at:440: testing syncline escapes: glr.c ... +./synclines.at:440: $CC $CFLAGS $CPPFLAGS \"\\\"\".c -o \"\\\"\" || exit 77 +285. synclines.at:440: testing syncline escapes: lalr1.cc ... +./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 + + +286. synclines.at:440: testing syncline escapes: glr.cc ... +./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 281. synclines.at:346: ok -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file +288. synclines.at:497: testing %no-lines: yacc.c ... +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.c -d input.y +287. synclines.at:440: testing syncline escapes: glr2.cc ... +./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 stderr: input.y: In function 'yy_symbol_value_print': input.y:2:2: error: #error "2" 2 | #error "2" | ^~~~~ +stderr: ./synclines.at:370: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8772,478 +8704,476 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF +sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] stdout: input.y:2: #error "2" ./synclines.at:370: cat stdout ./synclines.at:440: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".c $LIBS -285. synclines.at:440: testing syncline escapes: lalr1.cc ... -./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 -stderr: +./conflicts.at:2363: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=error +289. synclines.at:497: testing %no-lines: glr.c ... +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.c -d input.y 282. synclines.at:370: ok -stdout: -./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".c \"\\\"\".y -./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file -286. synclines.at:440: testing syncline escapes: glr.cc ... 
-./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 -./synclines.at:440: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".c $LIBS -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: stdout: -./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./synclines.at:497: mv input.c without.c +./synclines.at:497: mv input.h without.h +./synclines.at:497: grep '#line' *.c *.h +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -d input.y +./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".c \"\\\"\".y stderr: -stdout: -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stdout: -./synclines.at:440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS -283. synclines.at:440: ok -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file - -./synclines.at:440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -287. synclines.at:440: testing syncline escapes: glr2.cc ... 
-./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -stderr: -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stdout: -./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./synclines.at:440: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; 
VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file +./conflicts.at:518: $PREPARSER ./input +290. synclines.at:497: testing %no-lines: lalr1.cc ... +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y stderr: -./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file -stdout: -./conflicts.at:1096: $PREPARSER ./input +syntax error, unexpected end of file +./conflicts.at:518: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +212. conflicts.at:518: ok +./synclines.at:497: mv input.c without.c stderr: -syntax error -./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -237. conflicts.at:1096: ok -./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file +./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y stderr: stdout: -284. synclines.at:440: ok - - -288. synclines.at:497: testing %no-lines: yacc.c ... 
-./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.c -d input.y -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./synclines.at:497: mv input.c without.c +stdout: ./synclines.at:497: mv input.h without.h +./conflicts.at:388: $PREPARSER ./input '0<0' +./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Wnone,none -Werror --trace=none +./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y +stderr: ./synclines.at:497: grep '#line' *.c *.h -289. synclines.at:497: testing %no-lines: glr.c ... +./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -d input.y -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.c -d input.y -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./synclines.at:497: mv input.c without.c + +stderr: +./conflicts.at:388: $PREPARSER ./input '0<0<0' +stdout: +stderr: +syntax error, unexpected '<', expecting end of file +./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y +./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./synclines.at:497: mv input.c with.c -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./synclines.at:497: mv input.h without.h +./conflicts.at:388: $PREPARSER ./input '0>0' +stderr: ./synclines.at:497: mv input.h with.h +./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./synclines.at:497: grep -v '#line' with.c >expout -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +stderr: +./conflicts.at:388: $PREPARSER ./input '0>0>0' +stdout: ./synclines.at:497: cat without.c -./synclines.at:497: grep '#line' *.c *.h +291. 
synclines.at:497: testing %no-lines: glr.cc ... +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y +./synclines.at:497: mv input.cc without.cc +stderr: +./conflicts.at:1096: $PREPARSER ./input +syntax error, unexpected '>', expecting end of file +./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./synclines.at:440: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".c $LIBS ./synclines.at:497: grep -v '#line' with.h >expout -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -d input.y -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +stderr: +./synclines.at:497: mv input.hh without.hh +syntax error +./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./synclines.at:497: grep '#line' *.cc *.hh ./synclines.at:497: cat without.h -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:388: $PREPARSER ./input '0<0>0' +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y +stderr: +syntax error, unexpected '>', expecting end of file +./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +234. conflicts.at:1096: ok 288. synclines.at:497: ok -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file - -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./synclines.at:440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS +209. conflicts.at:301: ok ./synclines.at:497: mv input.c with.c + +./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=none -Werror --trace=none ./synclines.at:497: mv input.h with.h -290. 
synclines.at:497: testing %no-lines: lalr1.cc ... -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file ./synclines.at:497: grep -v '#line' with.c >expout ./synclines.at:497: cat without.c -./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file -./synclines.at:497: grep -v '#line' with.h >expout -./synclines.at:497: cat without.h -289. synclines.at:497: ok -./synclines.at:497: mv input.cc without.cc -./synclines.at:497: mv input.hh without.hh -./synclines.at:497: grep '#line' *.cc *.hh -./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y -291. synclines.at:497: testing %no-lines: glr.cc ... -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y -./synclines.at:497: mv input.cc with.cc -./synclines.at:497: mv input.hh with.hh -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./synclines.at:497: grep -v '#line' with.cc >expout -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./synclines.at:497: cat without.cc -./synclines.at:497: grep -v '#line' with.hh >expout -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./synclines.at:497: cat without.hh -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -290. 
synclines.at:497: ok -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./synclines.at:497: mv input.cc without.cc -./synclines.at:497: mv input.hh without.hh -./synclines.at:497: grep '#line' *.cc *.hh -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y -./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file +./synclines.at:497: grep -v '#line' with.h >expout +./synclines.at:440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS +./synclines.at:497: cat without.h 292. synclines.at:497: testing %no-lines: glr2.cc ... ./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y -./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file +./synclines.at:440: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS +stderr: +stderr: +289. synclines.at:497: ok +stdout: +stdout: +283. synclines.at:440: 294. headers.at:56: testing Invalid CPP guards: --defines=input/input.h ... + ok +./conflicts.at:743: $PREPARSER ./input ./synclines.at:497: mv input.cc with.cc +stderr: +syntax error, unexpected 'a', expecting 'b' or 'c' ./synclines.at:497: mv input.hh with.hh +./headers.at:56: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=input/input.h --output=input/input.c input/input.y +293. synclines.at:507: testing Output columns ... +./conflicts.at:743: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./synclines.at:540: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + ./synclines.at:497: grep -v '#line' with.cc >expout + +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y ./synclines.at:497: cat without.cc ./synclines.at:497: mv input.cc without.cc ./synclines.at:497: grep -v '#line' with.hh >expout ./synclines.at:497: mv input.hh without.hh ./synclines.at:497: cat without.hh ./synclines.at:497: grep '#line' *.cc *.hh +290. synclines.at:497: ok ./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y -291. synclines.at:497: ok - ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -293. synclines.at:507: testing Output columns ... -./synclines.at:540: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +295. 
headers.at:57: testing Invalid CPP guards: --defines=9foo.h ... +./headers.at:57: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=9foo.h --output=9foo.c 9foo.y +296. headers.at:58: testing Invalid CPP guards: %glr-parser --defines=input/input.h ... ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./synclines.at:497: mv input.cc with.cc -./synclines.at:497: mv input.hh with.hh -./synclines.at:497: grep -v '#line' with.cc >expout -./synclines.at:497: cat without.cc -./synclines.at:497: grep -v '#line' with.hh >expout -./synclines.at:497: cat without.hh -292. synclines.at:497: ok -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./headers.at:58: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=input/input.h --output=input/input.c input/input.y ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -294. headers.at:56: testing Invalid CPP guards: --defines=input/input.h ... +./synclines.at:497: mv input.cc without.cc +./synclines.at:497: mv input.hh without.hh ./synclines.at:541: sed -ne '/--BEGIN/,/--END/{' \ -e '/input.c/s/ [0-9]* / LINE /;' \ -e 'p;}' \ input.c +./synclines.at:497: grep '#line' *.cc *.hh +297. headers.at:59: testing Invalid CPP guards: %glr-parser --defines=9foo.h ... +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y +./headers.at:56: $CC $CFLAGS $CPPFLAGS -c -o input/input.o -I. -c input/input.c 293. 
synclines.at:507: ok -./headers.at:56: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=input/input.h --output=input/input.c input/input.y +./headers.at:59: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=9foo.h --output=9foo.c 9foo.y ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Werror +./headers.at:57: $CC $CFLAGS $CPPFLAGS -c -o 9foo.o -I. -c 9foo.c ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./synclines.at:497: mv input.cc with.cc +./synclines.at:497: mv input.hh with.hh +./headers.at:58: $CC $CFLAGS $CPPFLAGS -c -o input/input.o -I. -c input/input.c +./synclines.at:497: grep -v '#line' with.cc >expout +./synclines.at:497: cat without.cc +./synclines.at:497: grep -v '#line' with.hh >expout +298. headers.at:67: testing export YYLTYPE ... ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -295. headers.at:57: testing Invalid CPP guards: --defines=9foo.h ... -./headers.at:57: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=9foo.h --output=9foo.c 9foo.y +./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header -o input.c input.y +./synclines.at:497: cat without.hh ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./headers.at:56: $CC $CFLAGS $CPPFLAGS -c -o input/input.o -I. -c input/input.c -./headers.at:57: $CC $CFLAGS $CPPFLAGS -c -o 9foo.o -I. -c 9foo.c -stderr: -stdout: -285. synclines.at:440: ok +291. synclines.at:497: ok +./headers.at:59: $CC $CFLAGS $CPPFLAGS -c -o 9foo.o -I. 
-c 9foo.c +./synclines.at:497: mv input.cc with.cc ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./synclines.at:497: mv input.hh with.hh + +./synclines.at:497: grep -v '#line' with.cc >expout +./synclines.at:497: cat without.cc ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +stderr: +input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./synclines.at:497: grep -v '#line' with.hh >expout +./synclines.at:497: cat without.hh +./conflicts.at:748: sed 's,.*/$,,' stderr 1>&2 +292. synclines.at:497: ok ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=error +299. headers.at:177: testing Sane headers: ... +./headers.at:177: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -296. headers.at:58: testing Invalid CPP guards: %glr-parser --defines=input/input.h ... -./headers.at:58: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=input/input.h --output=input/input.c input/input.y -258. conflicts.at:2331: ok -stderr: -stdout: +./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y -Werror stderr: stdout: -295. headers.at:57: ok +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file 294. headers.at:56: ok - - - -297. headers.at:59: testing Invalid CPP guards: %glr-parser --defines=9foo.h ... -299. headers.at:177: testing Sane headers: ... 
-./headers.at:177: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -298. headers.at:67: testing export YYLTYPE ... -./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header -o input.c input.y -./headers.at:59: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=9foo.h --output=9foo.c 9foo.y -./headers.at:58: $CC $CFLAGS $CPPFLAGS -c -o input/input.o -I. -c input/input.c -./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y -Werror -./headers.at:177: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./headers.at:59: $CC $CFLAGS $CPPFLAGS -c -o 9foo.o -I. -c 9foo.c stderr: stdout: -286. synclines.at:440: ok stderr: -input.y:11.1-18: error: deprecated directive: '%name-prefix "my_"', use '%define api.prefix {my_}' [-Werror=deprecated] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./headers.at:85: sed 's,.*/$,,' stderr 1>&2 - -./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y --warnings=error 300. headers.at:178: testing Sane headers: %locations %debug ... ./headers.at:178: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y -Wnone,none -Werror --trace=none -./headers.at:178: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y --warnings=none -Werror --trace=none -./headers.at:102: $CC $CFLAGS $CPPFLAGS -c -o caller.o caller.c -stderr: -stderr: -stdout: +./conflicts.at:564: $PREPARSER ./input +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stdout: -./headers.at:103: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./headers.at:177: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +295. headers.at:57: ok stderr: -stdout: -299. headers.at:177: ok +syntax error, unexpected end of file, expecting 'b' +./conflicts.at:564: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -301. headers.at:180: testing Sane headers: %glr-parser ... 
-./headers.at:180: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -./headers.at:180: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -stderr: -stdout: -./headers.at:104: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o caller caller.o input.o $LIBS -stderr: -stdout: -./headers.at:105: $PREPARSER ./caller -stderr: -./headers.at:105: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -298. headers.at:67: ok +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stdout: -./headers.at:178: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +./headers.at:177: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./conflicts.at:558: $PREPARSER ./input stderr: -stdout: -300. headers.at:178: ok +syntax error, unexpected end of file, expecting 'b' +stderr: +./conflicts.at:558: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input.y:11.1-18: error: deprecated directive: '%name-prefix "my_"', use '%define api.prefix {my_}' [-Werror=deprecated] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Wnone,none -Werror --trace=none +219. conflicts.at:558: ok +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./headers.at:85: sed 's,.*/$,,' stderr 1>&2 +220. conflicts.at:564: ok stderr: +301. headers.at:180: testing Sane headers: %glr-parser ... stdout: -296. headers.at:58: ok +./headers.at:180: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y +./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y --warnings=error +./output.at:836: $PREPARSER ./parser +./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file 302. headers.at:181: testing Sane headers: %locations %debug %glr-parser ... ./headers.at:181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y +stderr: + +./output.at:836: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +144. output.at:744: ok +./headers.at:178: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c 303. headers.at:183: testing Sane headers: api.pure ... ./headers.at:183: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -stderr: -stdout: -297. headers.at:59: ok 304. headers.at:184: testing Sane headers: api.push-pull=both ... 
-./headers.at:181: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c ./headers.at:184: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y - +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=none -Werror --trace=none 305. headers.at:185: testing Sane headers: api.pure api.push-pull=both ... -./headers.at:183: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c ./headers.at:185: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -./headers.at:184: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y -Wnone,none -Werror --trace=none +./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file +./headers.at:181: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./headers.at:180: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./headers.at:183: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c ./headers.at:185: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./headers.at:184: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y --warnings=none -Werror --trace=none +./conflicts.at:753: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./headers.at:102: $CC $CFLAGS $CPPFLAGS -c -o caller.o caller.c +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export 
NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: stdout: +./headers.at:103: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: -287. synclines.at:440: ok stdout: -./headers.at:183: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c - +./headers.at:177: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stdout: -303. headers.at:183: ok +299. headers.at:177: ok +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file 306. 
headers.at:187: testing Sane headers: c++ ... +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./headers.at:187: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y -307. headers.at:188: testing Sane headers: %locations %debug c++ ... -./headers.at:188: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y -./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: +./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file stdout: -./headers.at:184: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +./headers.at:183: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c stderr: stdout: -304. headers.at:184: ok +./headers.at:104: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o caller caller.o input.o $LIBS +./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc stderr: stdout: -./headers.at:185: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c -./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc stderr: -stdout: -305. headers.at:185: ok - - -308. headers.at:189: testing Sane headers: c++ api.value.type=variant parse.assert ... -./headers.at:189: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y -309. headers.at:191: testing Sane headers: %locations c++ %glr-parser ... -./headers.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y -./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc -./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc +./conflicts.at:1096: $PREPARSER ./input stderr: stdout: -./headers.at:180: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c stderr: -stdout: -301. headers.at:180: ok - -310. headers.at:199: testing Several parsers ... -./headers.at:320: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x1.c x1.y -./headers.at:320: $CC $CFLAGS $CPPFLAGS -c -o x1.o x1.c stderr: stdout: -./headers.at:181: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c -stderr: +syntax error +./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +303. headers.at:183: ok +./headers.at:184: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c stdout: -302. headers.at:181: ok +235. 
conflicts.at:1096: ok + +./headers.at:185: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file -311. actions.at:24: testing Midrule actions ... -./actions.at:59: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y -./actions.at:60: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -stdout: -./headers.at:320: echo "x1" >>expout -./headers.at:321: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x2.c x2.y -./headers.at:321: $CC $CFLAGS $CPPFLAGS -c -o x2.o x2.c stderr: stdout: -./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./headers.at:105: $PREPARSER ./caller stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] -time limit exceeded: 6.000000 - First example: H i . J K $end - Shift derivation - $accept - `-> 0: a $end - `-> 2: H i - `-> 4: i . J K - Second example: H i . J $end - Reduce derivation - $accept - `-> 0: s $end - `-> 1: a J - `-> 2: H i . -input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./counterexample.at:451: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./headers.at:105: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -stderr: +298. headers.at:67: stderr: stdout: -./actions.at:61: $PREPARSER ./input -stderr: -./actions.at:61: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -311. actions.at:24: ok + ok +307. headers.at:188: testing Sane headers: %locations %debug c++ ... +./headers.at:188: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y +305. headers.at:185: 304. headers.at:184: ok + ok -312. actions.at:72: testing Typed midrule actions ... -./actions.at:109: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y -stderr: -stdout: -./headers.at:321: echo "x2" >>expout -./actions.at:110: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./headers.at:322: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x3.c x3.y -./headers.at:322: $CC $CFLAGS $CPPFLAGS -c -o x3.o x3.c + + +308. headers.at:189: testing Sane headers: c++ api.value.type=variant parse.assert ... +./headers.at:189: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y +311. actions.at:24: testing Midrule actions ... +./actions.at:59: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y +309. headers.at:191: testing Sane headers: %locations c++ %glr-parser ... 
+./headers.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y +./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file +310. headers.at:199: testing Several parsers ... +./headers.at:320: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x1.c x1.y +./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc +./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc stderr: stdout: -./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file +./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc stderr: stdout: -./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./headers.at:178: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +./conflicts.at:754: $PREPARSER ./input stderr: -stdout: -./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +syntax error, unexpected 'a', expecting 'b' or 'c' +./conflicts.at:754: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:60: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +228. conflicts.at:676: ok +296. headers.at:58: ok stderr: stdout: stderr: +284. synclines.at:440: ok + stdout: -308. headers.at:189: ok -306. headers.at:187: ok +./headers.at:320: $CC $CFLAGS $CPPFLAGS -c -o x1.o x1.c + +300. headers.at:178: ok -313. actions.at:122: testing Implicitly empty rule ... -./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -Werror 314. actions.at:172: testing Invalid uses of %empty ... ./actions.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret one.y +313. actions.at:122: testing Implicitly empty rule ... +./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./actions.at:192: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -u one.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +315. actions.at:240: testing Valid uses of %empty ... 
+./actions.at:259: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +312. actions.at:72: testing Typed midrule actions ... +./actions.at:109: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file ./actions.at:202: sed -e '1,8d' one.y -./actions.at:219: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret two.y -314. actions.at:172: ok +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: stdout: -./actions.at:111: $PREPARSER ./input -stderr: - -./actions.at:111: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.y:11.17-18: error: empty rule without %empty [-Werror=empty-rule] - 11 | a: /* empty. */ {}; - | ^~ - | %empty -1.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -312. actions.at:72: ok -./actions.at:133: sed 's,.*/$,,' stderr 1>&2 -315. actions.at:240: testing Valid uses of %empty ... -./actions.at:259: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -Werror +297. headers.at:59: ok +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./actions.at:219: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret two.y -./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y --warnings=error +./actions.at:259: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file 316. 
actions.at:270: testing Add missing %empty ... ./actions.at:285: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --update -Wall input.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: -input.y:3.4-5: warning: empty rule without %empty [-Wempty-rule] +314. actions.at:172: input.y:3.4-5: warning: empty rule without %empty [-Wempty-rule] input.y:4.3-5.1: warning: empty rule without %empty [-Wempty-rule] input.y:6.3: warning: empty rule without %empty [-Wempty-rule] input.y:8.2: warning: empty rule without %empty [-Wempty-rule] input.y:9.3: warning: empty rule without %empty [-Wempty-rule] bison: file 'input.y' was updated (backup: 'input.y~') ./actions.at:286: cat input.y + ok +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file ./actions.at:300: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y -./actions.at:259: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -Wnone,none -Werror --trace=none -316. actions.at:270: ok -./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y --warnings=none -Werror --trace=none +stderr: +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +1.y:11.17-18: error: empty rule without %empty [-Werror=empty-rule] + 11 | a: /* empty. */ {}; + | ^~ + | %empty +1.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./actions.at:133: sed 's,.*/$,,' stderr 1>&2 317. actions.at:365: testing Initial location: yacc.c ... 
+./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y --warnings=error ./actions.at:365: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./actions.at:110: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file +316. actions.at:270: ok stderr: stdout: -./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret 2.y +./conflicts.at:1096: $PREPARSER ./input +stderr: + +syntax error +./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:365: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +236. conflicts.at:1096: ok +./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -Wnone,none -Werror --trace=none + +318. actions.at:366: testing Initial location: yacc.c api.pure=full ... +./actions.at:366: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file +319. actions.at:367: testing Initial location: yacc.c api.pure %parse-param { int x } ... 
+./actions.at:367: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y --warnings=none -Werror --trace=none +./actions.at:366: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:367: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret 2.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +stderr: +stdout: +./headers.at:320: echo "x1" >>expout +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./headers.at:321: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x2.c x2.y +./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file ./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y -Werror +./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file +./headers.at:321: $CC $CFLAGS $CPPFLAGS -c -o x2.o x2.c stderr: stdout: ./actions.at:260: $PREPARSER ./input stderr: ./actions.at:260: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -315. actions.at:240: ok -stderr: -stdout: -./headers.at:322: echo "x3" >>expout stderr: 2.y:11.17-18: error: empty rule without %empty [-Werror=empty-rule] 11 | a: /* empty. */ {}; @@ -9254,66 +9184,61 @@ | ^~ | %empty 2.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] -./headers.at:323: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x4.c x4.y -./actions.at:149: sed 's,.*/$,,' stderr 1>&2 +315. actions.at:240: ok +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./actions.at:149: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +320. actions.at:368: testing Initial location: yacc.c api.push-pull=both ... +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file ./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y --warnings=error +./actions.at:368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: -318. actions.at:366: testing Initial location: yacc.c api.pure=full ... stdout: -./actions.at:366: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y -Wnone,none -Werror --trace=none -./actions.at:366: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./headers.at:323: $CC $CFLAGS $CPPFLAGS -c -o x4.o x4.c -./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y --warnings=none -Werror --trace=none +./headers.at:180: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: stdout: -./actions.at:365: $PREPARSER ./input -stderr: -1.1 -1.1: syntax error -./actions.at:365: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:161: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wno-empty-rule 2.y -317. actions.at:365: ok +301. headers.at:180: ok +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./actions.at:368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -319. actions.at:367: testing Initial location: yacc.c api.pure %parse-param { int x } ... -313. actions.at:122: ok -./actions.at:367: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y -Wnone,none -Werror --trace=none +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: stdout: -./actions.at:366: $PREPARSER ./input +./actions.at:61: $PREPARSER ./input stderr: -1.1 -1.1: syntax error -./actions.at:366: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -318. actions.at:366: ok stderr: stdout: -./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc - -./actions.at:367: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -320. actions.at:368: testing Initial location: yacc.c api.push-pull=both ... -./actions.at:368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file 321. actions.at:369: testing Initial location: yacc.c api.push-pull=both api.pure=full ... ./actions.at:369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:369: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -stdout: -./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -stderr: -stdout: -./actions.at:368: $PREPARSER ./input +./actions.at:365: $PREPARSER ./input +./actions.at:61: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.1 1.1: syntax error -./actions.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -320. actions.at:368: ok +./actions.at:365: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +317. actions.at:365: 311. 
actions.at:24: ok + ok +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file + +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y --warnings=none -Werror --trace=none 322. actions.at:370: testing Initial location: glr.c ... +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file ./actions.at:370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +323. actions.at:371: testing Initial location: glr.c api.pure ... +./actions.at:371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:369: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: stdout: ./actions.at:367: $PREPARSER ./input @@ -9321,192 +9246,181 @@ 1.1 1.1: syntax error ./actions.at:367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file 319. actions.at:367: ok +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file ./actions.at:370: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - -323. actions.at:371: testing Initial location: glr.c api.pure ... -./actions.at:371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:161: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wno-empty-rule 2.y stderr: stdout: -./actions.at:369: $PREPARSER ./input + +285. synclines.at:440: ok +258. conflicts.at:2331: ok ./actions.at:371: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -1.1 -1.1: syntax error -./actions.at:369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -321. actions.at:369: ok + stderr: stdout: -309. 
headers.at:191: ok - +./actions.at:366: $PREPARSER ./input +stderr: +1.1 +1.1: syntax error +./actions.at:366: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 324. actions.at:372: testing Initial location: lalr1.cc ... ./actions.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -325. actions.at:373: testing Initial location: glr.cc ... -./actions.at:373: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./actions.at:372: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./actions.at:373: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./headers.at:323: echo "x4" >>expout -./headers.at:324: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x5.cc x5.y -./headers.at:324: $CXX $CPPFLAGS $CXXFLAGS -c -o x5.o x5.cc +318. actions.at:366: ok stderr: stdout: -./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./actions.at:111: $PREPARSER ./input +325. actions.at:373: testing Initial location: glr.cc ... +./actions.at:373: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: -stdout: -307. headers.at:188: ok - 326. actions.at:374: testing Initial location: glr2.cc ... ./actions.at:374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./actions.at:374: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./actions.at:371: $PREPARSER ./input -stderr: -1.1 -1.1: syntax error -./actions.at:371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -323. actions.at:371: ok +./actions.at:111: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +312. actions.at:72: 313. actions.at:122: ok + ok + 327. actions.at:383: testing Initial location: yacc.c api.pure=full ... ./actions.at:383: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:383: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -stdout: -./actions.at:370: $PREPARSER ./input -stderr: -1.1 -1.1: syntax error -./actions.at:370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -322. actions.at:370: ok - +./actions.at:372: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS 328. actions.at:394: testing Initial location: yacc.c api.pure=full ... ./actions.at:394: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +329. actions.at:478: testing Location print: yacc.c ... 
+./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:374: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:373: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:383: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./actions.at:394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -./actions.at:394: $PREPARSER ./input -stderr: +./headers.at:181: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c stderr: stdout: -0 -0: syntax error -./actions.at:394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:383: $PREPARSER ./input -stderr: - -: syntax error -./actions.at:383: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -328. actions.at:394: ok -327. actions.at:383: ok - +302. headers.at:181: ok -329. actions.at:478: testing Location print: yacc.c ... -./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y 330. actions.at:478: testing Location print: glr.c ... ./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -./actions.at:478: $PREPARSER ./input -stderr: -./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -329. actions.at:478: ok +286. synclines.at:440: ok stderr: -331. actions.at:478: testing Location print: lalr1.cc ... -./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stdout: -./actions.at:373: $PREPARSER ./input +./actions.at:368: $PREPARSER ./input stderr: 1.1 1.1: syntax error -./actions.at:373: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -325. actions.at:373: ok +./actions.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +320. actions.at:368: ok +331. actions.at:478: testing Location print: lalr1.cc ... +./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./actions.at:478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./headers.at:321: echo "x2" >>expout +./headers.at:322: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x3.c x3.y 332. actions.at:478: testing Location print: glr.cc ... ./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./actions.at:478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: +./headers.at:322: $CC $CFLAGS $CPPFLAGS -c -o x3.o x3.c stdout: -./actions.at:372: $PREPARSER ./input +./actions.at:369: $PREPARSER ./input stderr: 1.1 1.1: syntax error -./actions.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -324. 
actions.at:372: ok +./actions.at:369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +321. actions.at:369: ok -./actions.at:478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS 333. actions.at:478: testing Location print: glr2.cc ... ./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./headers.at:324: echo "x5" >>expout -./headers.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x6.c x6.y -./actions.at:478: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:383: $PREPARSER ./input +stderr: + +: syntax error +./actions.at:383: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +327. actions.at:383: ok stdout: -./headers.at:325: $CC $CFLAGS $CPPFLAGS -c -o x6.o x6.c ./actions.at:478: $PREPARSER ./input + +stderr: stderr: +stdout: +./actions.at:394: $PREPARSER ./input ./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -330. actions.at:478: ok - +./actions.at:478: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +0 +0: syntax error +./actions.at:394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +329. actions.at:478: ok +328. actions.at:394: ok 334. actions.at:488: testing Exotic Dollars ... ./actions.at:532: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y + + +335. actions.at:1047: testing Printers and Destructors ... +./actions.at:1047: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +336. actions.at:1048: testing Printers and Destructors with union ... +./actions.at:1048: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./actions.at:533: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1047: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1048: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -./headers.at:325: echo "x6" >>expout -./headers.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x7.c x7.y -./headers.at:326: $CC $CFLAGS $CPPFLAGS -c -o x7.o x7.c +./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] -time limit exceeded: 6.000000 - First example H i . J K $end - Shift derivation $accept -> [ a -> [ H i -> [ i . J K ] ] $end ] - Second example H i . J $end - Reduce derivation $accept -> [ s -> [ a -> [ H i . ] J ] $end ] -input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -267. counterexample.at:441: ok +stdout: +./actions.at:370: $PREPARSER ./input +stderr: +1.1 +1.1: syntax error +./actions.at:370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +322. actions.at:370: ok + +337. actions.at:1050: testing Printers and Destructors: %glr-parser ... 
+./actions.at:1050: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +stdout: +./actions.at:371: $PREPARSER ./input +stderr: +1.1 +1.1: syntax error +./actions.at:371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +323. actions.at:371: ok -335. actions.at:1047: testing Printers and Destructors ... -./actions.at:1047: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: stdout: ./actions.at:534: $PREPARSER ./input stderr: ./actions.at:534: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:562: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -stdout: -./headers.at:326: echo "x7" >>expout -./headers.at:327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x8.c x8.y +338. actions.at:1051: testing Printers and Destructors with union: %glr-parser ... +./actions.at:1051: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1050: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./actions.at:562: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1047: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./headers.at:327: $CC $CFLAGS $CPPFLAGS -c -o x8.o x8.c +./actions.at:1051: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -./actions.at:563: $PREPARSER ./input +./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc stderr: -./actions.at:563: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -334. actions.at:488: ok - -336. actions.at:1048: testing Printers and Destructors with union ... -./actions.at:1048: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1048: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stdout: stderr: stdout: -./headers.at:327: echo "x8" >>expout -./headers.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x9.cc x9.y +./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc stderr: stdout: -./actions.at:1047: $PREPARSER ./input '(x)' +./actions.at:1048: $PREPARSER ./input '(x)' stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -9519,9 +9433,11 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1047: $PREPARSER ./input '!' +./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1048: $PREPARSER ./input '!' stderr: +stderr: +stdout: sending: '!' (0@0-9) sending: END (1@10-19) raise (4@9-9): %empty @@ -9529,8 +9445,12 @@ Freeing token END (1@10-19) Freeing nterm input (5@0-19) Successful parse. -./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1047: $PREPARSER ./input '!!!' +./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:1096: $PREPARSER ./input +stderr: +syntax error +./actions.at:1048: $PREPARSER ./input '!!!' 
+./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: sending: '!' (0@0-9) sending: '!' (1@10-19) @@ -9541,9 +9461,39 @@ Freeing token END (3@30-39) Freeing nterm input (5@0-29) Successful parse. +./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +237. conflicts.at:1096: ok +./actions.at:1047: $PREPARSER ./input '(x)' +./actions.at:1048: $PREPARSER ./input '(y)' +stderr: +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: END (3@30-39) +input (0@29-29): /* Nothing */ +input (2@0-29): line (0@0-29) input (0@29-29) +Freeing token END (3@30-39) +Freeing nterm input (2@0-29) +Successful parse. ./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1047: $PREPARSER ./input '(y)' stderr: + +./actions.at:1047: $PREPARSER ./input '!' +stdout: +stderr: +stderr: +./actions.at:478: $PREPARSER ./input +sending: '!' (0@0-9) +sending: END (1@10-19) +raise (4@9-9): %empty +check-spontaneous-errors (5@9-19): error (@9-19) +Freeing token END (1@10-19) +Freeing nterm input (5@0-19) +Successful parse. sending: '(' (0@0-9) sending: 'y' (1@10-19) 10.10-19.18: syntax error, unexpected 'y', expecting 'x' @@ -9556,9 +9506,21 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. +./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: ./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./headers.at:328: $CXX $CPPFLAGS $CXXFLAGS -c -o x9.o x9.cc -./actions.at:1047: $PREPARSER ./input '(xxxxx)(x)(x)y' +stdout: +./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:322: echo "x3" >>expout +./actions.at:1048: $PREPARSER ./input '(xxxxx)(x)(x)y' +330. actions.at:478: ok +./headers.at:323: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x4.c x4.y +stderr: +./actions.at:1047: $PREPARSER ./input '!!!' +stdout: +stderr: +./actions.at:563: $PREPARSER ./input stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -9595,125 +9557,7 @@ Freeing nterm input (2@0-129) Freeing token 'y' (13@130-139) Parsing FAILED. -./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1047: $PREPARSER ./input '(x)(x)x' -stderr: -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: '(' (3@30-39) -sending: 'x' (4@40-49) -thing (4@40-49): 'x' (4@40-49) -sending: ')' (5@50-59) -line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) -sending: 'x' (6@60-69) -thing (6@60-69): 'x' (6@60-69) -sending: END (7@70-79) -70.70-79.78: syntax error, unexpected END, expecting 'x' -Freeing nterm thing (6@60-69) -Freeing nterm line (3@30-59) -Freeing nterm line (0@0-29) -Freeing token END (7@70-79) -Parsing FAILED. 
-./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1047: $PREPARSER ./input '(x)(x)(x)(x)(x)(x)(x)' -stderr: -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: '(' (3@30-39) -sending: 'x' (4@40-49) -thing (4@40-49): 'x' (4@40-49) -sending: ')' (5@50-59) -line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) -sending: '(' (6@60-69) -sending: 'x' (7@70-79) -thing (7@70-79): 'x' (7@70-79) -sending: ')' (8@80-89) -line (6@60-89): '(' (6@60-69) thing (7@70-79) ')' (8@80-89) -sending: '(' (9@90-99) -sending: 'x' (10@100-109) -thing (10@100-109): 'x' (10@100-109) -sending: ')' (11@110-119) -line (9@90-119): '(' (9@90-99) thing (10@100-109) ')' (11@110-119) -sending: '(' (12@120-129) -sending: 'x' (13@130-139) -thing (13@130-139): 'x' (13@130-139) -sending: ')' (14@140-149) -line (12@120-149): '(' (12@120-129) thing (13@130-139) ')' (14@140-149) -sending: '(' (15@150-159) -sending: 'x' (16@160-169) -thing (16@160-169): 'x' (16@160-169) -sending: ')' (17@170-179) -line (15@150-179): '(' (15@150-159) thing (16@160-169) ')' (17@170-179) -sending: '(' (18@180-189) -sending: 'x' (19@190-199) -thing (19@190-199): 'x' (19@190-199) -sending: ')' (20@200-209) -200.200-209.208: memory exhausted -Freeing nterm thing (19@190-199) -Freeing nterm line (15@150-179) -Freeing nterm line (12@120-149) -Freeing nterm line (9@90-119) -Freeing nterm line (6@60-89) -Freeing nterm line (3@30-59) -Freeing nterm line (0@0-29) -Parsing FAILED (status 2). -./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -335. actions.at:1047: ./actions.at:478: $PREPARSER ./input - ok -stderr: -./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -331. actions.at:478: ok - -337. actions.at:1050: testing Printers and Destructors: %glr-parser ... -./actions.at:1050: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -338. actions.at:1051: testing Printers and Destructors with union: %glr-parser ... -./actions.at:1051: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1050: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1051: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -stdout: -./actions.at:478: $PREPARSER ./input -stderr: -stdout: -stderr: -./actions.at:1048: $PREPARSER ./input '(x)' -stderr: -./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: END (3@30-39) -input (0@29-29): /* Nothing */ -input (2@0-29): line (0@0-29) input (0@29-29) -Freeing token END (3@30-39) -Freeing nterm input (2@0-29) -Successful parse. -./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -332. actions.at:478: ok -./actions.at:1048: $PREPARSER ./input '!' -stderr: -sending: '!' (0@0-9) -sending: END (1@10-19) -raise (4@9-9): %empty -check-spontaneous-errors (5@9-19): error (@9-19) -Freeing token END (1@10-19) -Freeing nterm input (5@0-19) -Successful parse. 
./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./actions.at:1048: $PREPARSER ./input '!!!' -stderr: sending: '!' (0@0-9) sending: '!' (1@10-19) sending: '!' (2@20-29) @@ -9723,8 +9567,11 @@ Freeing token END (3@30-39) Freeing nterm input (5@0-29) Successful parse. -./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1048: $PREPARSER ./input '(y)' +./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + +./actions.at:1047: $PREPARSER ./input '(y)' +./actions.at:563: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: sending: '(' (0@0-9) sending: 'y' (1@10-19) @@ -9738,8 +9585,10 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1048: $PREPARSER ./input '(xxxxx)(x)(x)y' +./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +334. actions.at:488: ok +./actions.at:1047: $PREPARSER ./input '(xxxxx)(x)(x)y' +./actions.at:1048: $PREPARSER ./input '(x)(x)x' 339. actions.at:1053: testing Printers and Destructors: %header lalr1.cc ... ./actions.at:1053: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: @@ -9778,8 +9627,7 @@ Freeing nterm input (2@0-129) Freeing token 'y' (13@130-139) Parsing FAILED. -./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1048: $PREPARSER ./input '(x)(x)x' +./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -9801,6 +9649,29 @@ Freeing token END (7@70-79) Parsing FAILED. ./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1047: $PREPARSER ./input '(x)(x)x' +stderr: +340. actions.at:1054: testing Printers and Destructors with union: %header lalr1.cc ... +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: '(' (3@30-39) +sending: 'x' (4@40-49) +thing (4@40-49): 'x' (4@40-49) +sending: ')' (5@50-59) +line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) +sending: 'x' (6@60-69) +thing (6@60-69): 'x' (6@60-69) +sending: END (7@70-79) +70.70-79.78: syntax error, unexpected END, expecting 'x' +Freeing nterm thing (6@60-69) +Freeing nterm line (3@30-59) +Freeing nterm line (0@0-29) +Freeing token END (7@70-79) +Parsing FAILED. +./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1048: $PREPARSER ./input '(x)(x)(x)(x)(x)(x)(x)' stderr: sending: '(' (0@0-9) @@ -9847,27 +9718,198 @@ Freeing nterm line (0@0-29) Parsing FAILED (status 2). 
./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./actions.at:1054: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./actions.at:1047: $PREPARSER ./input '(x)(x)(x)(x)(x)(x)(x)' +stderr: +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: '(' (3@30-39) +sending: 'x' (4@40-49) +thing (4@40-49): 'x' (4@40-49) +sending: ')' (5@50-59) +line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) +sending: '(' (6@60-69) +sending: 'x' (7@70-79) +thing (7@70-79): 'x' (7@70-79) +sending: ')' (8@80-89) +line (6@60-89): '(' (6@60-69) thing (7@70-79) ')' (8@80-89) +sending: '(' (9@90-99) +sending: 'x' (10@100-109) +thing (10@100-109): 'x' (10@100-109) +sending: ')' (11@110-119) +line (9@90-119): '(' (9@90-99) thing (10@100-109) ')' (11@110-119) +sending: '(' (12@120-129) +sending: 'x' (13@130-139) +thing (13@130-139): 'x' (13@130-139) +sending: ')' (14@140-149) +line (12@120-149): '(' (12@120-129) thing (13@130-139) ')' (14@140-149) +sending: '(' (15@150-159) +sending: 'x' (16@160-169) +thing (16@160-169): 'x' (16@160-169) +sending: ')' (17@170-179) +line (15@150-179): '(' (15@150-159) thing (16@160-169) ')' (17@170-179) +sending: '(' (18@180-189) +sending: 'x' (19@190-199) +thing (19@190-199): 'x' (19@190-199) +sending: ')' (20@200-209) +200.200-209.208: memory exhausted +Freeing nterm thing (19@190-199) +Freeing nterm line (15@150-179) +Freeing nterm line (12@120-149) +Freeing nterm line (9@90-119) +Freeing nterm line (6@60-89) +Freeing nterm line (3@30-59) +Freeing nterm line (0@0-29) +Parsing FAILED (status 2). +./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 336. actions.at:1048: ok +335. actions.at:1047: ok + +341. actions.at:1056: testing Printers and Destructors: %header glr.cc ... +./actions.at:1056: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./headers.at:323: $CC $CFLAGS $CPPFLAGS -c -o x4.o x4.c +342. actions.at:1057: testing Printers and Destructors with union: %header glr.cc ... +./actions.at:1057: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +343. actions.at:1059: testing Printers and Destructors: %header glr2.cc ... +./actions.at:1059: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y ./actions.at:1053: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -340. actions.at:1054: testing Printers and Destructors with union: %header lalr1.cc ... -./actions.at:1054: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y ./actions.at:1054: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./actions.at:374: $PREPARSER ./input +./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +stderr: +stdout: +306. headers.at:187: ok + +./actions.at:1056: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +344. actions.at:1060: testing Printers and Destructors with union: %header glr2.cc ... 
+./actions.at:1060: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./actions.at:1057: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:1059: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:1060: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stderr: +stdout: +stdout: +./actions.at:372: $PREPARSER ./input stderr: 1.1 1.1: syntax error -./actions.at:374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -326. actions.at:374: ok +287. synclines.at:440: ok +./actions.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +324. actions.at:372: ok -341. actions.at:1056: testing Printers and Destructors: %header glr.cc ... -./actions.at:1056: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./actions.at:1056: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS + +345. actions.at:1071: testing Default tagless %printer and %destructor ... +./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +346. actions.at:1174: testing Default tagged and per-type %printer and %destructor ... +./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: stdout: -./actions.at:1050: $PREPARSER ./input '(x)' +./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +stderr: +stdout: +./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +stderr: +stdout: +./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +stderr: +stdout: +308. headers.at:189: ok +stderr: +stdout: +./actions.at:478: $PREPARSER ./input +stderr: + +./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +331. actions.at:478: ok +stderr: +input.y:30.3-5: error: useless %destructor for type <*> [-Werror=other] +input.y:30.3-5: error: useless %printer for type <*> [-Werror=other] + +./actions.at:1116: sed 's,.*/$,,' stderr 1>&2 +stderr: +stdout: +347. actions.at:1307: testing Default %printer and %destructor for user-defined end token ... +./actions.at:373: $PREPARSER ./input +./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input0.c input0.y +./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +stderr: +1.1 +1.1: syntax error +./actions.at:373: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +325. actions.at:373: ok +stderr: +348. actions.at:1429: testing Default %printer and %destructor are not for error or $undefined ... 
+./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +input.y:22.3-4: error: useless %destructor for type <> [-Werror=other] +input.y:22.3-4: error: useless %printer for type <> [-Werror=other] + +./actions.at:1233: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y -Werror +349. actions.at:1532: testing Default %printer and %destructor are not for $accept ... +./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +stderr: +input0.y:30.3-5: error: useless %destructor for type <*> [-Werror=other] +input0.y:30.3-5: error: useless %printer for type <*> [-Werror=other] +./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +./actions.at:1416: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y --warnings=error +stderr: +input.y:23.6-8: error: useless %destructor for type <*> [-Werror=other] +input.y:23.6-8: error: useless %printer for type <*> [-Werror=other] +./actions.at:1474: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +stderr: +input.y:24.3-4: error: useless %destructor for type <> [-Werror=other] +input.y:24.3-4: error: useless %printer for type <> [-Werror=other] +./actions.at:1582: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1582: COLUMNS=1000; 
export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y -Wnone,none -Werror --trace=none +./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./actions.at:1120: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y --warnings=none -Werror --trace=none +./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +stderr: +./actions.at:1237: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stdout: +./actions.at:478: $PREPARSER ./input +stderr: +./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input0 input0.c $LIBS +332. actions.at:478: ok + +./actions.at:1586: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +350. actions.at:1596: testing Default %printer and %destructor for midrule values ... 
+./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +stderr: +input.y:24.57-59: error: useless %destructor for type <*> [-Werror=other] +input.y:24.57-59: error: useless %printer for type <*> [-Werror=other] +input.y:33.3-23: error: unset value: $$ [-Werror=other] +input.y:32.3-23: error: unused value: $3 [-Werror=other] +./actions.at:1634: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +stderr: +stdout: +./actions.at:1051: $PREPARSER ./input '(x)' stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -9880,8 +9922,8 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1050: $PREPARSER ./input '!' +./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1051: $PREPARSER ./input '!' stderr: sending: '!' (0@0-9) sending: END (1@10-19) @@ -9890,8 +9932,10 @@ Freeing token END (1@10-19) Freeing nterm input (5@0-19) Successful parse. -./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1050: $PREPARSER ./input '!!!' +./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./actions.at:1051: $PREPARSER ./input '!!!' +stderr: stderr: sending: '!' (0@0-9) sending: '!' (1@10-19) @@ -9902,9 +9946,14 @@ Freeing token END (3@30-39) Freeing nterm input (5@0-29) Successful parse. -./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1050: $PREPARSER ./input '(y)' +stdout: +./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc stderr: +./actions.at:1051: $PREPARSER ./input '(y)' +stdout: +stderr: +./headers.at:323: echo "x4" >>expout sending: '(' (0@0-9) sending: 'y' (1@10-19) 10.10-19.18: syntax error, unexpected 'y', expecting 'x' @@ -9917,9 +9966,27 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. 
-./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1050: $PREPARSER ./input '(xxxxx)(x)(x)y' +./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./headers.at:324: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x5.cc x5.y +stdout: +./actions.at:1050: $PREPARSER ./input '(x)' +./actions.at:1051: $PREPARSER ./input '(xxxxx)(x)(x)y' +stderr: +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: END (3@30-39) +input (0@29-29): /* Nothing */ +input (2@0-29): line (0@0-29) input (0@29-29) +Freeing token END (3@30-39) +Freeing nterm input (2@0-29) +Successful parse. stderr: +stderr: +./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -9955,9 +10022,20 @@ Freeing nterm input (2@0-129) Freeing token 'y' (13@130-139) Parsing FAILED. -./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1050: $PREPARSER ./input '(x)(x)x' +stdout: +./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +349. actions.at:1532: ok +./actions.at:1051: $PREPARSER ./input '(x)(x)x' +./actions.at:1050: $PREPARSER ./input '!' +stderr: stderr: +sending: '!' (0@0-9) +sending: END (1@10-19) +raise (4@9-9): %empty +check-spontaneous-errors (5@9-19): error (@9-19) +Freeing token END (1@10-19) +Freeing nterm input (5@0-19) +Successful parse. sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -9977,43 +10055,14 @@ Freeing nterm line (0@0-29) Freeing token END (7@70-79) Parsing FAILED. +./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -337. actions.at:1050: ok - -342. actions.at:1057: testing Printers and Destructors with union: %header glr.cc ... -./actions.at:1057: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./actions.at:1057: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./headers.at:328: echo "x9" >>expout -./headers.at:329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xa.cc xa.y stderr: stdout: -./actions.at:1051: $PREPARSER ./input '(x)' -stderr: -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: END (3@30-39) -input (0@29-29): /* Nothing */ -input (2@0-29): line (0@0-29) input (0@29-29) -Freeing token END (3@30-39) -Freeing nterm input (2@0-29) -Successful parse. -./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1051: $PREPARSER ./input '!' -stderr: -sending: '!' (0@0-9) -sending: END (1@10-19) -raise (4@9-9): %empty -check-spontaneous-errors (5@9-19): error (@9-19) -Freeing token END (1@10-19) -Freeing nterm input (5@0-19) -Successful parse. -./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1051: $PREPARSER ./input '!!!' + +338. 
actions.at:1051: ok +./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./actions.at:1050: $PREPARSER ./input '!!!' stderr: sending: '!' (0@0-9) sending: '!' (1@10-19) @@ -10024,8 +10073,10 @@ Freeing token END (3@30-39) Freeing nterm input (5@0-29) Successful parse. -./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1051: $PREPARSER ./input '(y)' +./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y +./actions.at:1050: $PREPARSER ./input '(y)' stderr: sending: '(' (0@0-9) sending: 'y' (1@10-19) @@ -10039,8 +10090,8 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1051: $PREPARSER ./input '(xxxxx)(x)(x)y' +./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1050: $PREPARSER ./input '(xxxxx)(x)(x)y' stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -10077,9 +10128,12 @@ Freeing nterm input (2@0-129) Freeing token 'y' (13@130-139) Parsing FAILED. -./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./headers.at:329: $CXX $CPPFLAGS $CXXFLAGS -c -o xa.o xa.cc -./actions.at:1051: $PREPARSER ./input '(x)(x)x' +./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +351. actions.at:1743: testing @$ in %initial-action implies %locations ... +352. actions.at:1744: testing @$ in %destructor implies %locations ... +./actions.at:1743: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1744: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1050: $PREPARSER ./input '(x)(x)x' stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -10100,15 +10154,443 @@ Freeing nterm line (0@0-29) Freeing token END (7@70-79) Parsing FAILED. -./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -338. actions.at:1051: ok +./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:324: $CXX $CPPFLAGS $CXXFLAGS -c -o x5.o x5.cc +337. actions.at:1050: ok +stderr: +stdout: +./actions.at:1479: $PREPARSER ./input --debug +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token 'a' ('a') +Shifting token 'a' ('a') +Entering state 1 +Stack now 0 1 +Reading a token +Next token is token 'b' ('b') +syntax error +Shifting token error () +Entering state 3 +Stack now 0 1 3 +Next token is token 'b' ('b') +Shifting token 'b' ('b') +Entering state 5 +Stack now 0 1 3 5 +Reading a token +Next token is token "invalid token" () +Error: popping token 'b' ('b') +DESTROY 'b' +Stack now 0 1 3 +Error: popping token error () +Stack now 0 1 +Shifting token error () +Entering state 3 +Stack now 0 1 3 +Next token is token "invalid token" () +Error: discarding token "invalid token" () +Error: popping token error () +Stack now 0 1 +Shifting token error () +Entering state 3 +Stack now 0 1 3 +Reading a token +Now at end of input. 
+Cleanup: discarding lookahead token "end of file" () +Stack now 0 1 3 +Cleanup: popping token error () +Cleanup: popping token 'a' ('a') +DESTROY 'a' +./actions.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -343. actions.at:1059: testing Printers and Destructors: %header glr2.cc ... -./actions.at:1059: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./actions.at:1059: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +348. actions.at:1429: ok +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror +./actions.at:1743: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + +./actions.at:1744: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +353. actions.at:1745: testing @$ in %printer implies %locations ... +./actions.at:1745: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +stdout: +./actions.at:1416: $PREPARSER ./input0 --debug +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reducing stack by rule 1 (line 49): +-> $$ = nterm start (1.1: <> for 'S' @ 1) +Entering state 1 +Stack now 0 1 +Reading a token +Now at end of input. +Shifting token END (1.1: <> for 'E' @ 1) +Entering state 2 +Stack now 0 1 2 +Stack now 0 1 2 +Cleanup: popping token END (1.1: <> for 'E' @ 1) +Cleanup: popping nterm start (1.1: <> for 'S' @ 1) +./actions.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +354. actions.at:1856: testing Qualified $$ in actions: yacc.c ... +./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input1.c input1.y +stderr: +stdout: +./actions.at:1238: $PREPARSER ./input --debug +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token 'a' (<*>//e printer) +Shifting token 'a' (<*>//e printer) +Entering state 1 +Stack now 0 1 +Reading a token +Next token is token 'b' ( printer) +Shifting token 'b' ( printer) +Entering state 3 +Stack now 0 1 3 +Reading a token +Next token is token 'c' ('c' printer) +Shifting token 'c' ('c' printer) +Entering state 5 +Stack now 0 1 3 5 +Reading a token +Next token is token 'd' ('d' printer) +Shifting token 'd' ('d' printer) +Entering state 6 +Stack now 0 1 3 5 6 +Reading a token +Next token is token 'e' (<*>//e printer) +Shifting token 'e' (<*>//e printer) +Entering state 7 +Stack now 0 1 3 5 6 7 +Reading a token +Next token is token 'f' (<*>//e printer) +Shifting token 'f' (<*>//e printer) +Entering state 8 +Stack now 0 1 3 5 6 7 8 +Reading a token +Now at end of input. 
+syntax error, unexpected end of file, expecting 'g' +Error: popping token 'f' (<*>//e printer) +Stack now 0 1 3 5 6 7 +Error: popping token 'e' (<*>//e printer) +Stack now 0 1 3 5 6 +Error: popping token 'd' ('d' printer) +Stack now 0 1 3 5 +Error: popping token 'c' ('c' printer) +Stack now 0 1 3 +Error: popping token 'b' ( printer) +Stack now 0 1 +Error: popping token 'a' (<*>//e printer) +Stack now 0 +Cleanup: discarding lookahead token "end of file" () +Stack now 0 +./actions.at:1238: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1745: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +input.y:24.57-59: error: useless %destructor for type <*> [-Werror=other] + 24 | %printer { #error "<*> printer should not be used" } <*> + | ^~~ +input.y:24.57-59: error: useless %printer for type <*> [-Werror=other] + 24 | %printer { #error "<*> printer should not be used" } <*> + | ^~~ +input.y:33.3-23: error: unset value: $$ [-Werror=other] + 33 | { @$ = 4; } // Only used. + | ^~~~~~~~~~~~~~~~~~~~~ +input.y:32.3-23: error: unused value: $3 [-Werror=other] + 32 | { USE ($$); @$ = 3; } // Only set. + | ^~~~~~~~~~~~~~~~~~~~~ +346. actions.at:1174: ok +./actions.at:1641: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1856: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + +stderr: +stdout: +./actions.at:1121: $PREPARSER ./input --debug +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token 'a' (1.1: <> printer for 'a' @ 1) +Shifting token 'a' (1.1: <> printer for 'a' @ 1) +Entering state 1 +Stack now 0 1 +Reading a token +Next token is token 'b' (1.2: 'b'/'c' printer for 'b' @ 2) +Shifting token 'b' (1.2: 'b'/'c' printer for 'b' @ 2) +Entering state 3 +Stack now 0 1 3 +Reading a token +Next token is token 'c' (1.3: 'b'/'c' printer for 'c' @ 3) +Shifting token 'c' (1.3: 'b'/'c' printer for 'c' @ 3) +Entering state 5 +Stack now 0 1 3 5 +Reading a token +Next token is token 'd' (1.4: <> printer for 'd' @ 4) +Shifting token 'd' (1.4: <> printer for 'd' @ 4) +Entering state 6 +Stack now 0 1 3 5 6 +Reading a token +Now at end of input. +1.5: syntax error, unexpected end of file, expecting 'e' +Error: popping token 'd' (1.4: <> printer for 'd' @ 4) +Stack now 0 1 3 5 +Error: popping token 'c' (1.3: 'b'/'c' printer for 'c' @ 3) +Stack now 0 1 3 +Error: popping token 'b' (1.2: 'b'/'c' printer for 'b' @ 2) +Stack now 0 1 +Error: popping token 'a' (1.1: <> printer for 'a' @ 1) +Stack now 0 +Cleanup: discarding lookahead token "end of file" (1.5: ) +Stack now 0 +./actions.at:1121: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +345. actions.at:1071: ok +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y -Werror +355. actions.at:1856: testing Qualified $$ in actions: glr.c ... +./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + +356. actions.at:1856: testing Qualified $$ in actions: lalr1.cc ... 
+./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none +input1.y:30.3-4: error: useless %destructor for type <> [-Werror=other] +input1.y:30.3-4: error: useless %printer for type <> [-Werror=other] +./actions.at:1417: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1856: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y --warnings=error +./actions.at:1856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y -Wnone,none -Werror --trace=none +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y --warnings=none -Werror --trace=none +./actions.at:1656: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1417: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input1 input1.c $LIBS +stderr: +stdout: +352. actions.at:1744: ok +stderr: +stdout: +./actions.at:1856: $PREPARSER ./input --debug +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Stack now 0 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Stack now 0 1 3 +Reducing stack by rule 1 (line 53): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Stack now 0 2 +Reading a token +Now at end of input. 
+Shifting token "end of file" () +Entering state 4 +Stack now 0 2 4 +Stack now 0 2 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) + +./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Stack now 0 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Stack now 0 1 3 +Reducing stack by rule 1 (line 53): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Stack now 0 2 +Reading a token +Now at end of input. +Shifting token "end of file" () +Entering state 4 +Stack now 0 2 4 +Stack now 0 2 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +./actions.at:1856: sed -ne '/ival:/p' stderr +stderr: +stdout: +354. actions.at:1856: ok +351. actions.at:1743: ok + + +357. actions.at:1856: testing Qualified $$ in actions: glr.cc ... +./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +stdout: +353. actions.at:1745: ok +358. actions.at:1856: testing Qualified $$ in actions: glr2.cc ... +359. actions.at:1863: testing Destroying lookahead assigned by semantic action ... +./actions.at:1905: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + +./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +360. actions.at:1918: testing YYBACKUP ... +./actions.at:1953: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./actions.at:1906: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +309. headers.at:191: ok + +./actions.at:1954: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +361. types.at:25: testing %union vs. api.value.type ... +./types.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./actions.at:1856: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +361. types.at:25: stderr: + ok +stdout: +307. headers.at:188: ok + + +362. types.at:44: testing %yacc vs. api.value.type=union ... +./types.at:53: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +363. types.at:139: testing yacc.c api.value.type={double} ... 
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +stderr: +stdout: +./actions.at:1657: $PREPARSER ./input --debug +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reducing stack by rule 1 (line 30): +-> $$ = nterm $@1 (: ) +Entering state 2 +Stack now 0 2 +Reducing stack by rule 2 (line 31): +-> $$ = nterm @2 (: 2) +Entering state 4 +Stack now 0 2 4 +Reducing stack by rule 3 (line 32): +-> $$ = nterm @3 (: 3) +Entering state 5 +Stack now 0 2 4 5 +Reducing stack by rule 4 (line 33): +-> $$ = nterm @4 (: 4) +Entering state 6 +Stack now 0 2 4 5 6 +Reading a token +Now at end of input. +syntax error +Error: popping nterm @4 (: 4) +DESTROY 4 +Stack now 0 2 4 5 +Error: popping nterm @3 (: 3) +DESTROY 3 +Stack now 0 2 4 +Error: popping nterm @2 (: 2) +DESTROY 2 +Stack now 0 2 +Error: popping nterm $@1 (: ) +Stack now 0 +Cleanup: discarding lookahead token "end of file" (: ) +Stack now 0 +./actions.at:1657: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +350. actions.at:1596: ok +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS + +362. types.at:44: ok + +364. types.at:139: testing yacc.c api.value.type={double} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +365. types.at:139: testing yacc.c api.value.type={variant} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +stderr: +stdout: +stderr: +stdout: +./actions.at:1907: $PREPARSER ./input +./actions.at:1417: $PREPARSER ./input1 --debug +stderr: +stderr: +'b' destructor +'a' destructor +Starting parse +Entering state 0 +Stack now 0 +Reducing stack by rule 1 (line 49): +-> $$ = nterm start (1.1: <*> for 'S' @ 1) +Entering state 1 +Stack now 0 1 +Reading a token +Now at end of input. +Shifting token END (1.1: <*> for 'E' @ 1) +Entering state 2 +Stack now 0 1 2 +Stack now 0 1 2 +Cleanup: popping token END (1.1: <*> for 'E' @ 1) +Cleanup: popping nterm start (1.1: <*> for 'S' @ 1) +./actions.at:1907: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +359. actions.at:1863: ok +347. actions.at:1307: ok + + +366. types.at:139: testing yacc.c api.value.type={variant} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +367. types.at:139: testing yacc.c api.value.type={struct foo} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +363. 
types.at:139: ok +stderr: +stdout: +./actions.at:1053: $PREPARSER ./input '(x)' + +stderr: stderr: +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: END (3@30-39) +input (0@29-29): /* Nothing */ +input (2@0-29): line (0@0-29) input (0@29-29) +Freeing token END (3@30-39) +Freeing nterm input (2@0-29) +Successful parse. +./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./actions.at:1054: $PREPARSER ./input '(x)' +./actions.at:1053: $PREPARSER ./input '!' stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -10122,7 +10604,18 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +sending: '!' (0@0-9) +sending: END (1@10-19) +raise (4@9-9): %empty +check-spontaneous-errors (5@9-19): error (@9-19) +Freeing token END (1@10-19) +Freeing nterm input (5@0-19) +Successful parse. +./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1054: $PREPARSER ./input '!' +./actions.at:1053: $PREPARSER ./input '!!!' +stderr: stderr: sending: '!' (0@0-9) sending: END (1@10-19) @@ -10132,8 +10625,6 @@ Freeing nterm input (5@0-19) Successful parse. ./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1054: $PREPARSER ./input '!!!' -stderr: sending: '!' (0@0-9) sending: '!' (1@10-19) sending: '!' (2@20-29) @@ -10143,8 +10634,10 @@ Freeing token END (3@30-39) Freeing nterm input (5@0-29) Successful parse. -./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1054: $PREPARSER ./input '(y)' +./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1053: $PREPARSER ./input '(y)' +./actions.at:1054: $PREPARSER ./input '!!!' +stderr: stderr: sending: '(' (0@0-9) sending: 'y' (1@10-19) @@ -10158,8 +10651,20 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. +./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +sending: '!' (0@0-9) +sending: '!' (1@10-19) +sending: '!' (2@20-29) +raise (5@10-29): ! (1@20-29) ! (2@20-29) +check-spontaneous-errors (5@10-29): error (@10-29) +sending: END (3@30-39) +Freeing token END (3@30-39) +Freeing nterm input (5@0-29) +Successful parse. ./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1054: $PREPARSER ./input '(xxxxx)(x)(x)y' +./actions.at:1053: $PREPARSER ./input '(xxxxx)(x)(x)y' +368. types.at:139: testing yacc.c api.value.type={struct foo} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -10196,9 +10701,12 @@ Freeing nterm input (2@0-129) Freeing token 'y' (13@130-139) Parsing FAILED. -./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1054: $PREPARSER ./input '(x)(x)x' +./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1053: $PREPARSER ./input '(x)(x)x' stderr: +stdout: +stderr: +./actions.at:1955: $PREPARSER ./input sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -10218,75 +10726,50 @@ Freeing nterm line (0@0-29) Freeing token END (7@70-79) Parsing FAILED. 
-./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -340. actions.at:1054: ok - -344. actions.at:1060: testing Printers and Destructors with union: %header glr2.cc ... -./actions.at:1060: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./actions.at:1060: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./actions.at:478: $PREPARSER ./input -stderr: -./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -333. actions.at:478: ok - -345. actions.at:1071: testing Default tagless %printer and %destructor ... -./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./actions.at:1053: $PREPARSER ./input '(x)' +./actions.at:1054: $PREPARSER ./input '(y)' +./actions.at:1955: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) +sending: 'y' (1@10-19) +10.10-19.18: syntax error, unexpected 'y', expecting 'x' +Freeing token 'y' (1@10-19) sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) sending: END (3@30-39) input (0@29-29): /* Nothing */ -input (2@0-29): line (0@0-29) input (0@29-29) +input (2@0-29): line (-1@0-29) input (0@29-29) Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1053: $PREPARSER ./input '!' -stderr: -sending: '!' (0@0-9) -sending: END (1@10-19) -raise (4@9-9): %empty -check-spontaneous-errors (5@9-19): error (@9-19) -Freeing token END (1@10-19) -Freeing nterm input (5@0-19) -Successful parse. -./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1053: $PREPARSER ./input '!!!' +./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +339. actions.at:1053: ok +360. actions.at:1918: ok + + +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +369. types.at:139: testing yacc.c api.value.type={struct bar} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +370. types.at:139: testing yacc.c api.value.type={struct bar} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: -sending: '!' (0@0-9) -sending: '!' (1@10-19) -sending: '!' (2@20-29) -raise (5@10-29): ! (1@20-29) ! (2@20-29) -check-spontaneous-errors (5@10-29): error (@10-29) -sending: END (3@30-39) -Freeing token END (3@30-39) -Freeing nterm input (5@0-29) -Successful parse. 
-./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1053: $PREPARSER ./input '(y)' +stdout: +./actions.at:1057: $PREPARSER ./input '(x)' +./actions.at:1054: $PREPARSER ./input '(xxxxx)(x)(x)y' stderr: sending: '(' (0@0-9) -sending: 'y' (1@10-19) -10.10-19.18: syntax error, unexpected 'y', expecting 'x' -Freeing token 'y' (1@10-19) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) sending: ')' (2@20-29) -line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) sending: END (3@30-39) input (0@29-29): /* Nothing */ -input (2@0-29): line (-1@0-29) input (0@29-29) +input (2@0-29): line (0@0-29) input (0@29-29) Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1053: $PREPARSER ./input '(xxxxx)(x)(x)y' +./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -10323,10 +10806,19 @@ Freeing nterm input (2@0-129) Freeing token 'y' (13@130-139) Parsing FAILED. -./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1053: $PREPARSER ./input '(x)(x)x' +./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1057: $PREPARSER ./input '!' +stderr: +./actions.at:1054: $PREPARSER ./input '(x)(x)x' +sending: '!' (0@0-9) +sending: END (1@10-19) +raise (4@9-9): %empty +check-spontaneous-errors (5@9-19): error (@9-19) +Freeing token END (1@10-19) +Freeing nterm input (5@0-19) +Successful parse. +./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -10346,54 +10838,8 @@ Freeing nterm line (0@0-29) Freeing token END (7@70-79) Parsing FAILED. -./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -339. actions.at:1053: ok - -346. actions.at:1174: testing Default tagged and per-type %printer and %destructor ... 
-./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -input.y:30.3-5: error: useless %destructor for type <*> [-Werror=other] -input.y:30.3-5: error: useless %printer for type <*> [-Werror=other] -./actions.at:1116: sed 's,.*/$,,' stderr 1>&2 -./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -stderr: -input.y:22.3-4: error: useless %destructor for type <> [-Werror=other] -input.y:22.3-4: error: useless %printer for type <> [-Werror=other] -./actions.at:1233: sed 's,.*/$,,' stderr 1>&2 -./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./actions.at:1120: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./actions.at:1237: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -stdout: -./actions.at:1057: $PREPARSER ./input '(x)' -stderr: -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: END (3@30-39) -input (0@29-29): /* Nothing */ -input (2@0-29): line (0@0-29) input (0@29-29) -Freeing token END (3@30-39) -Freeing nterm input (2@0-29) -Successful parse. -./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1057: $PREPARSER ./input '!' -stderr: -sending: '!' (0@0-9) -sending: END (1@10-19) -raise (4@9-9): %empty -check-spontaneous-errors (5@9-19): error (@9-19) -Freeing token END (1@10-19) -Freeing nterm input (5@0-19) -Successful parse. 
-./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS ./actions.at:1057: $PREPARSER ./input '!!!' stderr: sending: '!' (0@0-9) @@ -10406,7 +10852,36 @@ Freeing nterm input (5@0-29) Successful parse. ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +340. actions.at:1054: ok +stderr: +stdout: +./actions.at:1856: $PREPARSER ./input --debug ./actions.at:1057: $PREPARSER ./input '(y)' +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Reducing stack 0 by rule 1 (line 53): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Reading a token +Now at end of input. +Shifting token "end of file" () +Entering state 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + stderr: sending: '(' (0@0-9) sending: 'y' (1@10-19) @@ -10421,8 +10896,34 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1057: $PREPARSER ./input '(xxxxx)(x)(x)y' stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Reducing stack 0 by rule 1 (line 53): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Reading a token +Now at end of input. +Shifting token "end of file" () +Entering state 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +./actions.at:1856: sed -ne '/ival:/p' stderr +stderr: +./actions.at:1057: $PREPARSER ./input '(xxxxx)(x)(x)y' +stdout: +355. actions.at:1856: stderr: + ok sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -10459,6 +10960,8 @@ Freeing token 'y' (13@130-139) Parsing FAILED. ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test +stderr: ./actions.at:1057: $PREPARSER ./input '(x)(x)x' stderr: sending: '(' (0@0-9) @@ -10480,15 +10983,117 @@ Freeing nterm line (0@0-29) Freeing token END (7@70-79) Parsing FAILED. + ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +371. types.at:139: testing yacc.c api.value.type={union foo} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 342. actions.at:1057: ok +365. types.at:139: ok +stderr: +stdout: + +./types.at:139: $PREPARSER ./test + +stderr: +372. types.at:139: testing yacc.c api.value.type={union foo} %header ... 
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +364. types.at:139: ok +373. types.at:139: testing yacc.c %union { float fval; int ival; }; ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y + +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +374. types.at:139: testing yacc.c %union { float fval; int ival; }; %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +375. types.at:139: testing yacc.c %union foo { float fval; int ival; }; ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +stderr: +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stderr: +stdout: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +366. types.at:139: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS + +367. types.at:139: ok + +376. types.at:139: testing yacc.c %union foo { float fval; int ival; }; %header ... +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +stdout: +./actions.at:374: $PREPARSER ./input +stderr: +1.1 +1.1: syntax error +./actions.at:374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +326. actions.at:374: 377. types.at:139: testing yacc.c api.value.union.name=foo; %union { float fval; int ival; }; ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y + ok +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] +time limit exceeded: 6.000000 + First example: H i . J K $end + Shift derivation + $accept + `-> 0: a $end + `-> 2: H i + `-> 4: i . J K + Second example: H i . J $end + Reduce derivation + $accept + `-> 0: s $end + `-> 1: a J + `-> 2: H i . +input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./counterexample.at:451: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y + +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +378. types.at:139: testing yacc.c api.value.union.name=foo; %union { float fval; int ival; }; %header ... 
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +368. types.at:139: ok + +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +369. types.at:139: ok +379. types.at:139: testing yacc.c api.value.type=union ... +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +stdout: +./types.at:139: $PREPARSER ./test -347. actions.at:1307: testing Default %printer and %destructor for user-defined end token ... -./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input0.c input0.y -./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y -Werror +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +370. types.at:139: ok +380. types.at:139: testing yacc.c api.value.type=union %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y + +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: stdout: ./actions.at:1056: $PREPARSER ./input '(x)' +381. types.at:139: testing glr.c api.value.type={double} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -10501,8 +11106,13 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. +stderr: ./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./actions.at:478: $PREPARSER ./input +stderr: ./actions.at:1056: $PREPARSER ./input '!' +./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: sending: '!' (0@0-9) sending: END (1@10-19) @@ -10512,6 +11122,7 @@ Freeing nterm input (5@0-19) Successful parse. ./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +333. actions.at:478: ok ./actions.at:1056: $PREPARSER ./input '!!!' stderr: sending: '!' (0@0-9) @@ -10524,9 +11135,15 @@ Freeing nterm input (5@0-29) Successful parse. ./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS ./actions.at:1056: $PREPARSER ./input '(y)' stderr: stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr sending: '(' (0@0-9) sending: 'y' (1@10-19) 10.10-19.18: syntax error, unexpected 'y', expecting 'x' @@ -10539,12 +11156,11 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -input0.y:30.3-5: error: useless %destructor for type <*> [-Werror=other] -input0.y:30.3-5: error: useless %printer for type <*> [-Werror=other] ./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +371. 
types.at:139: ok ./actions.at:1056: $PREPARSER ./input '(xxxxx)(x)(x)y' + stderr: -./actions.at:1416: sed 's,.*/$,,' stderr 1>&2 sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -10581,8 +11197,14 @@ Freeing token 'y' (13@130-139) Parsing FAILED. ./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y --warnings=error +stderr: +382. types.at:139: testing glr.c api.value.type={double} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +stdout: +./types.at:139: $PREPARSER ./test ./actions.at:1056: $PREPARSER ./input '(x)(x)x' +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +stderr: stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -10604,614 +11226,421 @@ Freeing token END (7@70-79) Parsing FAILED. ./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +383. types.at:139: testing glr.c api.value.type={variant} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: 341. actions.at:1056: ok +stdout: +./types.at:139: $PREPARSER ./test +372. types.at:139: ok +stderr: +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stderr: +373. types.at:139: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok + + +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +375. types.at:139: ok +374. types.at:139: ok + +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +385. types.at:139: testing glr.c api.value.type={struct foo} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y + +384. types.at:139: testing glr.c api.value.type={variant} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +386. types.at:139: testing glr.c api.value.type={struct foo} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +387. types.at:139: testing glr.c api.value.type={struct bar} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +388. types.at:139: testing glr.c api.value.type={struct bar} %header ... 
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: -./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y -Wnone,none -Werror --trace=none stdout: -./actions.at:1238: $PREPARSER ./input --debug +./types.at:139: $PREPARSER ./test +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +376. types.at:139: ok + +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +stderr: +stdout: +stderr: +./actions.at:1856: $PREPARSER ./input --debug +stdout: +./types.at:139: $PREPARSER ./test stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token 'a' (<*>//e printer) -Shifting token 'a' (<*>//e printer) +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) Entering state 1 Stack now 0 1 Reading a token -Next token is token 'b' ( printer) -Shifting token 'b' ( printer) +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) Entering state 3 Stack now 0 1 3 +Reducing stack by rule 1 (line 55): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Stack now 0 2 Reading a token -Next token is token 'c' ('c' printer) -Shifting token 'c' ('c' printer) -Entering state 5 -Stack now 0 1 3 5 -Reading a token -Next token is token 'd' ('d' printer) -Shifting token 'd' ('d' printer) -Entering state 6 -Stack now 0 1 3 5 6 -Reading a token -Next token is token 'e' (<*>//e printer) -Shifting token 'e' (<*>//e printer) -Entering state 7 -Stack now 0 1 3 5 6 7 -Reading a token -Next token is token 'f' (<*>//e printer) -Shifting token 'f' (<*>//e printer) -Entering state 8 -Stack now 0 1 3 5 6 7 8 -Reading a token -Now at end of input. -syntax error, unexpected end of file, expecting 'g' -Error: popping token 'f' (<*>//e printer) -Stack now 0 1 3 5 6 7 -Error: popping token 'e' (<*>//e printer) -Stack now 0 1 3 5 6 -Error: popping token 'd' ('d' printer) -Stack now 0 1 3 5 -Error: popping token 'c' ('c' printer) -Stack now 0 1 3 -Error: popping token 'b' ( printer) -Stack now 0 1 -Error: popping token 'a' (<*>//e printer) -Stack now 0 -Cleanup: discarding lookahead token "end of file" () -Stack now 0 -348. actions.at:1429: testing Default %printer and %destructor are not for error or $undefined ... -./actions.at:1238: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -346. actions.at:1174: ok - -./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y --warnings=none -Werror --trace=none -349. actions.at:1532: testing Default %printer and %destructor are not for $accept ... 
-./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +Next token is token "end of file" () +Shifting token "end of file" () +Entering state 4 +Stack now 0 2 4 +Stack now 0 2 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +stderr: +./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +389. types.at:139: testing glr.c api.value.type={union foo} ... +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: stdout: -./actions.at:1121: $PREPARSER ./input --debug stderr: +378. types.at:139: ok Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token 'a' (1.1: <> printer for 'a' @ 1) -Shifting token 'a' (1.1: <> printer for 'a' @ 1) +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) Entering state 1 Stack now 0 1 Reading a token -Next token is token 'b' (1.2: 'b'/'c' printer for 'b' @ 2) -Shifting token 'b' (1.2: 'b'/'c' printer for 'b' @ 2) +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) Entering state 3 Stack now 0 1 3 +Reducing stack by rule 1 (line 55): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Stack now 0 2 Reading a token -Next token is token 'c' (1.3: 'b'/'c' printer for 'c' @ 3) -Shifting token 'c' (1.3: 'b'/'c' printer for 'c' @ 3) -Entering state 5 -Stack now 0 1 3 5 -Reading a token -Next token is token 'd' (1.4: <> printer for 'd' @ 4) -Shifting token 'd' (1.4: <> printer for 'd' @ 4) -Entering state 6 -Stack now 0 1 3 5 6 -Reading a token -Now at end of input. -1.5: syntax error, unexpected end of file, expecting 'e' -Error: popping token 'd' (1.4: <> printer for 'd' @ 4) -Stack now 0 1 3 5 -Error: popping token 'c' (1.3: 'b'/'c' printer for 'c' @ 3) -Stack now 0 1 3 -Error: popping token 'b' (1.2: 'b'/'c' printer for 'b' @ 2) -Stack now 0 1 -Error: popping token 'a' (1.1: <> printer for 'a' @ 1) -Stack now 0 -Cleanup: discarding lookahead token "end of file" (1.5: ) -Stack now 0 -./actions.at:1121: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -345. actions.at:1071: ok -./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +Next token is token "end of file" () +Shifting token "end of file" () +Entering state 4 +Stack now 0 2 4 +Stack now 0 2 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +./actions.at:1856: sed -ne '/ival:/p' stderr +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +356. actions.at:1856: ok +377. 
types.at:139: ok + + stderr: stdout: -./headers.at:329: echo "xa" >>expout -./headers.at:330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xb.cc xb.y -./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -350. actions.at:1596: testing Default %printer and %destructor for midrule values ... -./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input0 input0.c $LIBS +./headers.at:324: echo "x5" >>expout +390. types.at:139: testing glr.c api.value.type={union foo} %header ... +./headers.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x6.c x6.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +391. types.at:139: testing glr.c %union { float fval; int ival; }; ... +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +392. types.at:139: testing glr.c %union { float fval; int ival; }; %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: -input.y:23.6-8: error: useless %destructor for type <*> [-Werror=other] -input.y:23.6-8: error: useless %printer for type <*> [-Werror=other] -./actions.at:1474: sed 's,.*/$,,' stderr 1>&2 -./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +stdout: +./types.at:139: $PREPARSER ./test stderr: -input.y:24.3-4: error: useless %destructor for type <> [-Werror=other] -input.y:24.3-4: error: useless %printer for type <> [-Werror=other] -./actions.at:1582: sed 's,.*/$,,' stderr 1>&2 -./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -./headers.at:330: $CXX $CPPFLAGS $CXXFLAGS -c -o xb.o xb.cc -./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +379. 
types.at:139: ok stderr: -input.y:24.57-59: error: useless %destructor for type <*> [-Werror=other] -input.y:24.57-59: error: useless %printer for type <*> [-Werror=other] -input.y:33.3-23: error: unset value: $$ [-Werror=other] -input.y:32.3-23: error: unused value: $3 [-Werror=other] -./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -./actions.at:1634: sed 's,.*/$,,' stderr 1>&2 -./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +stdout: + +./types.at:139: $PREPARSER ./test stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: 6 reduce/reduce conflicts [-Wconflicts-rr] -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] - First example: . c A A $end - First reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 3: %empty . `-> 6: c A A - Second example: . c A A $end - Second reduce derivation - $accept - `-> 0: a $end - `-> 2: c d - `-> 4: %empty . `-> 6: c A A -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 6.000000 - First example: b . c A A $end - First reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 1: b d - `-> 3: %empty . `-> 6: c A A - Second example: b . A $end - Second reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 6: c A - `-> 4: %empty . -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 6.000000 - First example: c . c A A $end - First reduce derivation - $accept - `-> 0: a $end - `-> 2: c d - `-> 5: a - `-> 1: b d - `-> 3: %empty . `-> 6: c A A - Second example: c . A $end - Second reduce derivation - $accept - `-> 0: a $end - `-> 2: c d - `-> 6: c A - `-> 4: %empty . -input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 6.000000 - First example: b c . A - Shift derivation - a - `-> 1: b d - `-> 6: c . A - Second example: b c . c A A $end - Reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 2: c d - `-> 5: a - `-> 1: b d - `-> 3: %empty . `-> 6: c A A -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] - First example: b c . c A A $end - First reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 2: c d - `-> 5: a - `-> 1: b d - `-> 3: %empty . `-> 6: c A A - Second example: b c . A $end - Second reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 2: c d - `-> 6: c A - `-> 4: %empty . -input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] - First example: b c . A - Shift derivation - a - `-> 1: b d - `-> 6: c . A - Second example: b c . A $end - Reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 2: c d - `-> 6: c A - `-> 4: %empty . -input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] - Example: b d . 
- First reduce derivation - a - `-> 1: b d . - Second reduce derivation - a - `-> 1: b d - `-> 7: d . -input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] - Example: c d . - First reduce derivation - a - `-> 2: c d . - Second reduce derivation - a - `-> 2: c d - `-> 7: d . -input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] -input.y:6.15: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./counterexample.at:621: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -./actions.at:1478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1586: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror -stderr: -input.y:24.57-59: error: useless %destructor for type <*> [-Werror=other] - 24 | %printer { #error "<*> printer should not be used" } <*> - | ^~~ -input.y:24.57-59: error: useless %printer for type <*> [-Werror=other] - 24 | %printer { #error "<*> printer should not be used" } <*> - | ^~~ -input.y:33.3-23: error: unset value: $$ [-Werror=other] - 33 | { @$ = 4; } // Only used. - | ^~~~~~~~~~~~~~~~~~~~~ -input.y:32.3-23: error: unused value: $3 [-Werror=other] - 32 | { USE ($$); @$ = 3; } // Only set. - | ^~~~~~~~~~~~~~~~~~~~~ -./actions.at:1641: sed 's,.*/$,,' stderr 1>&2 -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error -stderr: -stdout: -./actions.at:1416: $PREPARSER ./input0 --debug -stderr: -stderr: -stdout: -Starting parse -Entering state 0 -Stack now 0 -Reducing stack by rule 1 (line 49): --> $$ = nterm start (1.1: <> for 'S' @ 1) -Entering state 1 -Stack now 0 1 -Reading a token -Now at end of input. 
-Shifting token END (1.1: <> for 'E' @ 1) -Entering state 2 -Stack now 0 1 2 -Stack now 0 1 2 -Cleanup: popping token END (1.1: <> for 'E' @ 1) -Cleanup: popping nterm start (1.1: <> for 'S' @ 1) -./actions.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -349. actions.at:1532: ok -stderr: -./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input1.c input1.y -stdout: -./actions.at:1479: $PREPARSER ./input --debug -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token 'a' ('a') -Shifting token 'a' ('a') -Entering state 1 -Stack now 0 1 -Reading a token -Next token is token 'b' ('b') -syntax error -Shifting token error () -Entering state 3 -Stack now 0 1 3 -Next token is token 'b' ('b') -Shifting token 'b' ('b') -Entering state 5 -Stack now 0 1 3 5 -Reading a token -Next token is token "invalid token" () -Error: popping token 'b' ('b') -DESTROY 'b' -Stack now 0 1 3 -Error: popping token error () -Stack now 0 1 -Shifting token error () -Entering state 3 -Stack now 0 1 3 -Next token is token "invalid token" () -Error: discarding token "invalid token" () -Error: popping token error () -Stack now 0 1 -Shifting token error () -Entering state 3 -Stack now 0 1 3 -Reading a token -Now at end of input. -Cleanup: discarding lookahead token "end of file" () -Stack now 0 1 3 -Cleanup: popping token error () -Cleanup: popping token 'a' ('a') -DESTROY 'a' -./actions.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -348. actions.at:1429: ok - -351. actions.at:1743: testing @$ in %initial-action implies %locations ... -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none -./actions.at:1743: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -352. actions.at:1744: testing @$ in %destructor implies %locations ... 
-./actions.at:1744: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y -Werror -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none -./actions.at:1743: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1744: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -input1.y:30.3-4: error: useless %destructor for type <> [-Werror=other] -input1.y:30.3-4: error: useless %printer for type <> [-Werror=other] -./actions.at:1417: sed 's,.*/$,,' stderr 1>&2 -./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y --warnings=error -./actions.at:1656: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y -Wnone,none -Werror --trace=none -./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y --warnings=none -Werror --trace=none -./actions.at:1417: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input1 input1.c $LIBS -stderr: -stdout: -stderr: -stderr: -stdout: -stdout: -351. actions.at:1743: ok -./actions.at:1657: $PREPARSER ./input --debug -stderr: -352. actions.at:1744: ok -Starting parse -Entering state 0 -Stack now 0 -Reducing stack by rule 1 (line 30): --> $$ = nterm $@1 (: ) -Entering state 2 -Stack now 0 2 -Reducing stack by rule 2 (line 31): --> $$ = nterm @2 (: 2) -Entering state 4 -Stack now 0 2 4 -Reducing stack by rule 3 (line 32): --> $$ = nterm @3 (: 3) -Entering state 5 -Stack now 0 2 4 5 -Reducing stack by rule 4 (line 33): --> $$ = nterm @4 (: 4) -Entering state 6 -Stack now 0 2 4 5 6 -Reading a token -Now at end of input. -syntax error -Error: popping nterm @4 (: 4) -DESTROY 4 -Stack now 0 2 4 5 -Error: popping nterm @3 (: 3) -DESTROY 3 -Stack now 0 2 4 -Error: popping nterm @2 (: 2) -DESTROY 2 -Stack now 0 2 -Error: popping nterm $@1 (: ) -Stack now 0 -Cleanup: discarding lookahead token "end of file" (: ) -Stack now 0 -./actions.at:1657: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -350. actions.at:1596: ok - +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:325: $CC $CFLAGS $CPPFLAGS -c -o x6.o x6.c +380. types.at:139: ok -353. actions.at:1745: testing @$ in %printer implies %locations ... -./actions.at:1745: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -354. 
actions.at:1856: testing Qualified $$ in actions: yacc.c ... -355. actions.at:1856: testing Qualified $$ in actions: glr.c ... -./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1856: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1745: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1856: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +393. types.at:139: testing glr.c %union foo { float fval; int ival; }; ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +394. types.at:139: testing glr.c %union foo { float fval; int ival; }; %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: stdout: -./actions.at:1417: $PREPARSER ./input1 --debug +./headers.at:325: echo "x6" >>expout +./headers.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x7.c x7.y +./headers.at:326: $CC $CFLAGS $CPPFLAGS -c -o x7.o x7.c stderr: -Starting parse -Entering state 0 -Stack now 0 -Reducing stack by rule 1 (line 49): --> $$ = nterm start (1.1: <*> for 'S' @ 1) -Entering state 1 -Stack now 0 1 -Reading a token -Now at end of input. -Shifting token END (1.1: <*> for 'E' @ 1) -Entering state 2 -Stack now 0 1 2 -Stack now 0 1 2 -Cleanup: popping token END (1.1: <*> for 'E' @ 1) -Cleanup: popping nterm start (1.1: <*> for 'S' @ 1) -./actions.at:1417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -347. actions.at:1307: stderr: - ok stdout: ./actions.at:1856: $PREPARSER ./input --debug stderr: Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token UNTYPED (ival: 10, fval: 0.1) Shifting token UNTYPED (ival: 10, fval: 0.1) Entering state 1 -Stack now 0 1 Reading a token Next token is token INT (ival: 20, fval: 0.2) Shifting token INT (ival: 20, fval: 0.2) Entering state 3 -Stack now 0 1 3 -Reducing stack by rule 1 (line 53): +Reducing stack 0 by rule 1 (line 55): $1 = token UNTYPED (ival: 10, fval: 0.1) $2 = token INT (ival: 20, fval: 0.2) -> $$ = nterm float (ival: 30, fval: 0.3) Entering state 2 -Stack now 0 2 Reading a token Now at end of input. 
Shifting token "end of file" () Entering state 4 -Stack now 0 2 4 -Stack now 0 2 4 Cleanup: popping token "end of file" () Cleanup: popping nterm float (ival: 30, fval: 0.3) ./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token UNTYPED (ival: 10, fval: 0.1) Shifting token UNTYPED (ival: 10, fval: 0.1) Entering state 1 -Stack now 0 1 Reading a token Next token is token INT (ival: 20, fval: 0.2) Shifting token INT (ival: 20, fval: 0.2) Entering state 3 -Stack now 0 1 3 -Reducing stack by rule 1 (line 53): +Reducing stack 0 by rule 1 (line 55): $1 = token UNTYPED (ival: 10, fval: 0.1) $2 = token INT (ival: 20, fval: 0.2) -> $$ = nterm float (ival: 30, fval: 0.3) Entering state 2 -Stack now 0 2 Reading a token Now at end of input. Shifting token "end of file" () Entering state 4 -Stack now 0 2 4 -Stack now 0 2 4 Cleanup: popping token "end of file" () Cleanup: popping nterm float (ival: 30, fval: 0.3) ./actions.at:1856: sed -ne '/ival:/p' stderr -354. actions.at:1856: ok -356. actions.at:1856: testing Qualified $$ in actions: lalr1.cc ... -./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +357. actions.at:1856: ok +395. types.at:139: testing glr.c api.value.union.name=foo; %union { float fval; int ival; }; ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: stdout: -353. actions.at:1745: ok +./headers.at:326: echo "x7" >>expout +./headers.at:327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x8.c x8.y +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./headers.at:327: $CC $CFLAGS $CPPFLAGS -c -o x8.o x8.c +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +381. types.at:139: ok +stderr: +stdout: +stderr: +./types.at:139: $PREPARSER ./test +stderr: -357. actions.at:1856: testing Qualified $$ in actions: glr.cc ... -./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -358. actions.at:1856: testing Qualified $$ in actions: glr2.cc ... -./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./actions.at:1856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./actions.at:1856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./actions.at:1856: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stdout: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test +383. types.at:139: ok +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +382. types.at:139: ok +396. types.at:139: testing glr.c api.value.union.name=foo; %union { float fval; int ival; }; %header ... 
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: stdout: -./actions.at:1856: $PREPARSER ./input --debug + +./types.at:139: $PREPARSER ./test stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 -Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) -Entering state 3 -Reducing stack 0 by rule 1 (line 53): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Reading a token -Now at end of input. -Shifting token "end of file" () -Entering state 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +397. types.at:139: testing glr.c api.value.type=union ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 -Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) -Entering state 3 -Reducing stack 0 by rule 1 (line 53): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Reading a token -Now at end of input. -Shifting token "end of file" () -Entering state 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -./actions.at:1856: sed -ne '/ival:/p' stderr -355. actions.at:1856: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: stderr: +./types.at:139: $PREPARSER ./test stdout: -./headers.at:330: echo "xb" >>expout +./types.at:139: $PREPARSER ./test +389. types.at:139: stderr: +stderr: + ok +stdout: +stderr: +398. types.at:139: testing glr.c api.value.type=union %header ... +./types.at:139: $PREPARSER ./test +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +384. types.at:139: ok +385. types.at:139: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +386. types.at:139: ok + -./headers.at:331: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xc.cc xc.y -359. actions.at:1863: testing Destroying lookahead assigned by semantic action ... 
-./actions.at:1905: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./headers.at:331: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS -c -o xc.o xc.cc -./actions.at:1906: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -./actions.at:1907: $PREPARSER ./input + +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $PREPARSER ./test stderr: -'b' destructor -'a' destructor -./actions.at:1907: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -359. actions.at:1863: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +388. types.at:139: ok +399. types.at:139: testing lalr1.cc api.value.type={double} ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +400. types.at:139: testing lalr1.cc api.value.type={double} %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + +402. types.at:139: testing lalr1.cc api.value.type={variant} %header ... +401. types.at:139: testing lalr1.cc api.value.type={variant} ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +403. types.at:139: testing lalr1.cc api.value.type={struct foo} ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +391. types.at:139: ok +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: + +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +387. types.at:139: ok + +404. types.at:139: testing lalr1.cc api.value.type={struct foo} %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +405. types.at:139: testing lalr1.cc api.value.type={struct bar} ... 
+======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +390. types.at:139: ok + +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +406. types.at:139: testing lalr1.cc api.value.type={struct bar} %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./headers.at:327: echo "x8" >>expout +stderr: +./headers.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x9.cc x9.y +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +393. types.at:139: ok + +stderr: +stdout: +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +392. types.at:139: ok +394. types.at:139: ok +407. types.at:139: testing lalr1.cc api.value.type={union foo} ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + + +./headers.at:328: $CXX $CPPFLAGS $CXXFLAGS -c -o x9.o x9.cc +409. types.at:139: testing lalr1.cc %union { float fval; int ival; }; ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +408. types.at:139: testing lalr1.cc api.value.type={union foo} %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +395. types.at:139: ok +410. types.at:139: testing lalr1.cc %union { float fval; int ival; }; %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stderr: +stdout: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test +stderr: +397. types.at:139: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +396. types.at:139: ok stderr: stdout: ./actions.at:1059: $PREPARSER ./input '(x)' + + stderr: -360. 
actions.at:1918: testing YYBACKUP ... -./actions.at:1953: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -11233,8 +11662,13 @@ Freeing token END (1@10-19) Freeing nterm input (5@0-19) Successful parse. +stderr: ./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: $PREPARSER ./test +stderr: ./actions.at:1059: $PREPARSER ./input '!!!' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: sending: '!' (0@0-9) sending: '!' (1@10-19) @@ -11246,7 +11680,13 @@ Freeing nterm input (5@0-29) Successful parse. ./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1954: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +411. types.at:139: testing lalr1.cc api.value.type=union ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +398. types.at:139: ok +412. types.at:139: testing lalr1.cc api.value.type=union %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./actions.at:1059: $PREPARSER ./input '(y)' stderr: sending: '(' (0@0-9) @@ -11300,6 +11740,7 @@ Freeing token 'y' (13@130-139) Parsing FAILED. ./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ./actions.at:1059: $PREPARSER ./input '(x)(x)x' stderr: sending: '(' (0@0-9) @@ -11323,217 +11764,17 @@ Parsing FAILED. ./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 343. actions.at:1059: ok +413. types.at:139: testing lalr1.cc api.value.type=variant ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -361. types.at:25: testing %union vs. api.value.type ... -./types.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -361. types.at:25: ok - -362. types.at:44: testing %yacc vs. api.value.type=union ... -./types.at:53: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -362. types.at:44: ok - -stderr: -stdout: -./actions.at:1955: $PREPARSER ./input -stderr: -./actions.at:1955: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -360. actions.at:1918: ok -363. types.at:139: testing yacc.c api.value.type={double} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y - -364. types.at:139: testing yacc.c api.value.type={double} %header ... 
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stdout: -./actions.at:1856: $PREPARSER ./input --debug -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 -Stack now 0 1 -Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) -Entering state 3 -Stack now 0 1 3 -Reducing stack by rule 1 (line 55): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token "end of file" () -Shifting token "end of file" () -Entering state 4 -Stack now 0 2 4 -Stack now 0 2 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 -Stack now 0 1 -Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) -Entering state 3 -Stack now 0 1 3 -Reducing stack by rule 1 (line 55): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token "end of file" () -Shifting token "end of file" () -Entering state 4 -Stack now 0 2 4 -Stack now 0 2 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -./actions.at:1856: sed -ne '/ival:/p' stderr -356. actions.at:1856: ok - -365. types.at:139: testing yacc.c api.value.type={variant} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -364. types.at:139: ok -stdout: - -./types.at:139: $PREPARSER ./test -stderr: -stderr: -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stdout: -./actions.at:1856: $PREPARSER ./input --debug -365. types.at:139: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ok -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 -Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) -Entering state 3 -Reducing stack 0 by rule 1 (line 55): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Reading a token -Now at end of input. 
-Shifting token "end of file" () -Entering state 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -366. types.at:139: testing yacc.c api.value.type={variant} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 -Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) -Entering state 3 -Reducing stack 0 by rule 1 (line 55): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Reading a token -Now at end of input. -Shifting token "end of file" () -Entering state 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -363. types.at:139: ok -./actions.at:1856: sed -ne '/ival:/p' stderr - -357. actions.at:1856: ok - - -367. types.at:139: testing yacc.c api.value.type={struct foo} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -368. types.at:139: testing yacc.c api.value.type={struct foo} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -369. types.at:139: testing yacc.c api.value.type={struct bar} ... -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -366. types.at:139: ok - -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -370. types.at:139: testing yacc.c api.value.type={struct bar} %header ... -369. types.at:139: ok -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y - -371. types.at:139: testing yacc.c api.value.type={union foo} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stderr: -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -368. types.at:139: ok -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS - -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -367. types.at:139: ok -372. types.at:139: testing yacc.c api.value.type={union foo} %header ... 
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y - -373. types.at:139: testing yacc.c %union { float fval; int ival; }; ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +414. types.at:139: testing lalr1.cc api.value.type=variant %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./actions.at:1060: $PREPARSER ./input '(x)' @@ -11550,7 +11791,6 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS ./actions.at:1060: $PREPARSER ./input '!' stderr: sending: '!' (0@0-9) @@ -11650,121 +11890,20 @@ ./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 344. actions.at:1060: ok -374. types.at:139: testing yacc.c %union { float fval; int ival; }; %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -370. types.at:139: ok - -375. types.at:139: testing yacc.c %union foo { float fval; int ival; }; ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -371. types.at:139: ok - -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -376. types.at:139: testing yacc.c %union foo { float fval; int ival; }; %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stderr: -stdout: -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -372. types.at:139: ok -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: - -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -373. types.at:139: ok - -377. types.at:139: testing yacc.c api.value.union.name=foo; %union { float fval; int ival; }; ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -378. types.at:139: testing yacc.c api.value.union.name=foo; %union { float fval; int ival; }; %header ... 
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -374. types.at:139: ok - -379. types.at:139: testing yacc.c api.value.type=union ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -376. types.at:139: ok - -380. types.at:139: testing yacc.c api.value.type=union %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -375. types.at:139: ok - -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -377. types.at:139: ok -381. types.at:139: testing glr.c api.value.type={double} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y - -382. types.at:139: testing glr.c api.value.type={double} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -378. types.at:139: ok - -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -383. types.at:139: testing glr.c api.value.type={variant} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -379. types.at:139: ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS - ok - -384. types.at:139: testing glr.c api.value.type={variant} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +415. types.at:139: testing lalr1.cc api.value.type=variant ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -380. types.at:139: ok - -385. types.at:139: testing glr.c api.value.type={struct foo} ... 
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./actions.at:1856: $PREPARSER ./input --debug stderr: Starting parse @@ -11814,359 +11953,244 @@ ./actions.at:1856: sed -ne '/ival:/p' stderr 358. actions.at:1856: ok -386. types.at:139: testing glr.c api.value.type={struct foo} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stdout: -./headers.at:331: echo "xc" >>expout -./headers.at:332: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xd.cc xd.y -stderr: -./headers.at:332: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS -c -o xd.o xd.cc -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -383. types.at:139: ok - -387. types.at:139: testing glr.c api.value.type={struct bar} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -382. types.at:139: ok - -388. types.at:139: testing glr.c api.value.type={struct bar} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +416. types.at:139: testing lalr1.cc api.value.type=variant %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -384. types.at:139: ok +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: - stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -381. types.at:139: ok -389. types.at:139: testing glr.c api.value.type={union foo} ... 
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y - +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -390. types.at:139: testing glr.c api.value.type={union foo} %header ... -385. types.at:139: ok -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS - -391. types.at:139: testing glr.c %union { float fval; int ival; }; ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -386. types.at:139: ok - -392. types.at:139: testing glr.c %union { float fval; int ival; }; %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test +./headers.at:328: echo "x9" >>expout +./headers.at:329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xa.cc xa.y stderr: stdout: -stderr: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -387. types.at:139: ok -388. types.at:139: ok - - -393. types.at:139: testing glr.c %union foo { float fval; int ival; }; ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -394. types.at:139: testing glr.c %union foo { float fval; int ival; }; %header ... 
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -389. types.at:139: ok - +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -395. types.at:139: testing glr.c api.value.union.name=foo; %union { float fval; int ival; }; ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -390. types.at:139: ok - +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./headers.at:329: $CXX $CPPFLAGS $CXXFLAGS -c -o xa.o xa.cc +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: -396. types.at:139: testing glr.c api.value.union.name=foo; %union { float fval; int ival; }; %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -391. types.at:139: ok - -397. types.at:139: testing glr.c api.value.type=union ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -392. types.at:139: ok - -398. types.at:139: testing glr.c api.value.type=union %header ... 
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -394. types.at:139: ok - -399. types.at:139: testing lalr1.cc api.value.type={double} ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -393. types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - -400. types.at:139: testing lalr1.cc api.value.type={double} %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -395. types.at:139: ok - stderr: +./types.at:139: $PREPARSER ./test stdout: ./types.at:139: $PREPARSER ./test stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -396. types.at:139: ok -401. types.at:139: testing lalr1.cc api.value.type={variant} ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -402. types.at:139: testing lalr1.cc api.value.type={variant} %header ... ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -397. types.at:139: ok - +stderr: +stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -403. types.at:139: testing lalr1.cc api.value.type={struct foo} ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stdout: -./headers.at:332: echo "xd" >>expout -./headers.at:342: "$PERL" -n -0777 -e ' - # Ignore comments. - s{/\*.*?\*/}{}gs; - s{//.*}{}g; - # Ignore warnings. - s{# *pragma .* message ".*"}{}g; - - s{\b((defined|if)\ YYDEBUG - |YYChar # Template parameter. - |YYNTOKENS # This is actually scoped in a C++ class. - |YYPUSH_MORE(?:_DEFINED)? - |S_(YY(ACCEPT|EMPTY|EOF|error|UNDEF)) # These guys are scoped. - |YY(?:_REINTERPRET)?_CAST - |YY_ATTRIBUTE(?:_PURE|_UNUSED) - |YY_CONSTEXPR - |YY_COPY - |YY_CPLUSPLUS - |YY_IGNORE_(?:MAYBE_UNINITIALIZED|USELESS_CAST)_(?:BEGIN|END) - |YY_INITIAL_VALUE - |YY_MOVE - |YY_MOVE_OR_COPY - |YY_MOVE_REF - |YY_NOEXCEPT - |YY_NOTHROW - |YY_NULLPTR - |YY_RVREF - |YY_USE - |YY_\w+_INCLUDED # Header guards. - |FILE\ \*yyo # Function argument. - |const\ yylocp # Function argument. - )\b}{}gx; - while (/^(.*YY.*)$/gm) - { - print "$ARGV: invalid exported YY: $1\n"; - } - if ($ARGV =~ /\.h$/) - { - while (/^(.*yy.*)$/gm) - { - print "$ARGV: invalid exported yy: $1\n"; - } - } -' -- *.hh *.h -./headers.at:387: $CC $CFLAGS $CPPFLAGS -c -o c-only.o c-only.c +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./headers.at:387: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx-only.o cxx-only.cc stderr: stdout: -./headers.at:387: $CXX $CXXFLAGS $CPPFLAGS $LDFLAGS c-only.o cxx-only.o -o c-and-cxx || - exit 77 -stderr: stdout: -./headers.at:387: $PREPARSER ./c-and-cxx -stderr: -./headers.at:387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./headers.at:392: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o parser x[1-9a-d].o -DCC_IS_CXX=$CC_IS_CXX main.cc $LIBS stderr: +./types.at:139: $PREPARSER ./test stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -398. types.at:139: ok - -404. types.at:139: testing lalr1.cc api.value.type={struct foo} %header ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./headers.at:329: echo "xa" >>expout +stderr: +./headers.at:330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xb.cc xb.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: -./headers.at:394: $PREPARSER ./parser -stderr: 
-./headers.at:394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -310. headers.at:199: ok - -405. types.at:139: testing lalr1.cc api.value.type={struct bar} ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $PREPARSER ./test stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stdout: stderr: +./types.at:139: $PREPARSER ./test stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: +stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./headers.at:330: $CXX $CPPFLAGS $CXXFLAGS -c -o xb.o xb.cc stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS 
$LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12203,18 +12227,10 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: -stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12227,18 +12243,18 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12248,24 +12264,22 @@ ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS 
-./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' @@ -12273,12 +12287,13 @@ ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12286,7 +12301,6 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12295,56 +12309,21 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: 6 reduce/reduce conflicts [-Wconflicts-rr] -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] - First example . c A A $end - First reduce derivation $accept -> [ a -> [ b -> [ . ] d -> [ c A A ] ] $end ] - Second example . c A A $end - Second reduce derivation $accept -> [ a -> [ c -> [ . ] d -> [ c A A ] ] $end ] -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 6.000000 - First example b . c A A $end - First reduce derivation $accept -> [ a -> [ b d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ] - Second example b . A $end - Second reduce derivation $accept -> [ a -> [ b d -> [ c -> [ . ] A ] ] $end ] -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 6.000000 - First example c . c A A $end - First reduce derivation $accept -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ] - Second example c . 
A $end - Second reduce derivation $accept -> [ a -> [ c d -> [ c -> [ . ] A ] ] $end ] -input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] +input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] time limit exceeded: 6.000000 - First example b c . A - Shift derivation a -> [ b d -> [ c . A ] ] - Second example b c . c A A $end - Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] ] ] $end ] -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] - First example b c . c A A $end - First reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] ] ] $end ] - Second example b c . A $end - Second reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ] -input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] - First example b c . A - Shift derivation a -> [ b d -> [ c . A ] ] - Second example b c . A $end - Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ] -input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] - Example b d . - First reduce derivation a -> [ b d . ] - Second reduce derivation a -> [ b d -> [ d . ] ] -input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] - Example c d . - First reduce derivation a -> [ c d . ] - Second reduce derivation a -> [ c d -> [ d . ] ] -input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] -input.y:6.15: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -270. counterexample.at:610: ok + First example H i . J K $end + Shift derivation $accept -> [ a -> [ H i -> [ i . J K ] ] $end ] + Second example H i . J $end + Reduce derivation $accept -> [ s -> [ a -> [ H i . ] J ] $end ] +input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +267. counterexample.at:441: ok -406. types.at:139: testing lalr1.cc api.value.type={struct bar} %header ... +417. types.at:139: testing lalr1.cc api.value.type=variant api.token.constructor ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -12353,25 +12332,30 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stdout: ./types.at:139: $PREPARSER ./test +stdout: +./headers.at:330: echo "xb" >>expout stderr: +./headers.at:331: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xc.cc xc.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:331: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS -c -o xc.o xc.cc ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12379,7 +12363,6 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12388,6 +12371,7 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12395,6 +12379,7 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12403,7 +12388,6 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: 
./types.at:139: $PREPARSER ./test @@ -12413,9 +12397,9 @@ ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stdout: stderr: stdout: +stdout: ./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test stderr: @@ -12426,8 +12410,6 @@ ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12439,16 +12421,17 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -12457,6 +12440,7 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -12464,20 +12448,19 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y 
./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -12485,6 +12468,7 @@ ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test stderr: @@ -12511,15 +12495,6 @@ ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12532,13 +12507,6 @@ ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -12547,17 +12515,6 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -399. types.at:139: ok - -407. types.at:139: testing lalr1.cc api.value.type={union foo} ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -12572,9 +12529,6 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -404. types.at:139: ok - -408. types.at:139: testing lalr1.cc api.value.type={union foo} %header ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: @@ -12582,10 +12536,6 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -401. types.at:139: ok - -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -409. types.at:139: testing lalr1.cc %union { float fval; int ival; }; ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -12594,58 +12544,34 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -400. types.at:139: ok - -410. types.at:139: testing lalr1.cc %union { float fval; int ival; }; %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -402. types.at:139: ok - -411. types.at:139: testing lalr1.cc api.value.type=union ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -403. types.at:139: ok - -412. types.at:139: testing lalr1.cc api.value.type=union %header ... 
-======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stdout: -stderr: -./types.at:139: $PREPARSER ./test -stdout: stderr: -./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12653,50 +12579,39 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -405. types.at:139: ok - -413. types.at:139: testing lalr1.cc api.value.type=variant ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stderr: stdout: -stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12718,25 +12633,17 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER 
./test @@ -12763,12 +12670,9 @@ ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./headers.at:331: echo "xc" >>expout +./headers.at:332: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xd.cc xd.y +./headers.at:332: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS -c -o xd.o xd.cc stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12785,14 +12689,13 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12801,6 +12704,7 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12808,7 +12712,6 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12816,7 +12719,6 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12833,6 +12735,8 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12840,42 +12744,38 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; 
NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stderr: stdout: ./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -406. types.at:139: ok - -stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -414. types.at:139: testing lalr1.cc api.value.type=variant %header ... +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12884,6 +12784,7 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12899,6 +12800,7 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12908,14 +12810,136 @@ ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: 6 reduce/reduce conflicts [-Wconflicts-rr] +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] + First example: . 
c A A $end + First reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 3: %empty . `-> 6: c A A + Second example: . c A A $end + Second reduce derivation + $accept + `-> 0: a $end + `-> 2: c d + `-> 4: %empty . `-> 6: c A A +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 6.000000 + First example: b . c A A $end + First reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 5: a + `-> 1: b d + `-> 3: %empty . `-> 6: c A A + Second example: b . A $end + Second reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 6: c A + `-> 4: %empty . +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 6.000000 + First example: c . c A A $end + First reduce derivation + $accept + `-> 0: a $end + `-> 2: c d + `-> 5: a + `-> 1: b d + `-> 3: %empty . `-> 6: c A A + Second example: c . A $end + Second reduce derivation + $accept + `-> 0: a $end + `-> 2: c d + `-> 6: c A + `-> 4: %empty . +input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 6.000000 + First example: b c . A + Shift derivation + a + `-> 1: b d + `-> 6: c . A + Second example: b c . c A A $end + Reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 5: a + `-> 2: c d + `-> 5: a + `-> 1: b d + `-> 3: %empty . `-> 6: c A A +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] + First example: b c . c A A $end + First reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 5: a + `-> 2: c d + `-> 5: a + `-> 1: b d + `-> 3: %empty . `-> 6: c A A + Second example: b c . A $end + Second reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 5: a + `-> 2: c d + `-> 6: c A + `-> 4: %empty . +input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] + First example: b c . A + Shift derivation + a + `-> 1: b d + `-> 6: c . A + Second example: b c . A $end + Reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 5: a + `-> 2: c d + `-> 6: c A + `-> 4: %empty . +input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] + Example: b d . + First reduce derivation + a + `-> 1: b d . + Second reduce derivation + a + `-> 1: b d + `-> 7: d . +input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] + Example: c d . + First reduce derivation + a + `-> 2: c d . + Second reduce derivation + a + `-> 2: c d + `-> 7: d . 
+input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] +input.y:6.15: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./counterexample.at:621: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12924,26 +12948,19 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./types.at:139: $PREPARSER ./test -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y @@ -12963,59 +12980,58 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; 
NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stderr: +stdout: stdout: ./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $PREPARSER ./test stderr: stdout: +stderr: ./types.at:139: $PREPARSER ./test stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -13025,6 +13041,7 @@ stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -13065,36 +13082,82 @@ ./types.at:139: $PREPARSER ./test stderr: 
./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -409. types.at:139: ok - -stderr: -415. types.at:139: testing lalr1.cc api.value.type=variant ... -stdout: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +stderr: +./headers.at:332: echo "xd" >>expout +stdout: +./headers.at:342: "$PERL" -n -0777 -e ' + # Ignore comments. + s{/\*.*?\*/}{}gs; + s{//.*}{}g; + # Ignore warnings. + s{# *pragma .* message ".*"}{}g; + + s{\b((defined|if)\ YYDEBUG + |YYChar # Template parameter. + |YYNTOKENS # This is actually scoped in a C++ class. + |YYPUSH_MORE(?:_DEFINED)? + |S_(YY(ACCEPT|EMPTY|EOF|error|UNDEF)) # These guys are scoped. + |YY(?:_REINTERPRET)?_CAST + |YY_ATTRIBUTE(?:_PURE|_UNUSED) + |YY_CONSTEXPR + |YY_COPY + |YY_CPLUSPLUS + |YY_IGNORE_(?:MAYBE_UNINITIALIZED|USELESS_CAST)_(?:BEGIN|END) + |YY_INITIAL_VALUE + |YY_MOVE + |YY_MOVE_OR_COPY + |YY_MOVE_REF + |YY_NOEXCEPT + |YY_NOTHROW + |YY_NULLPTR + |YY_RVREF + |YY_USE + |YY_\w+_INCLUDED # Header guards. + |FILE\ \*yyo # Function argument. + |const\ yylocp # Function argument. + )\b}{}gx; + while (/^(.*YY.*)$/gm) + { + print "$ARGV: invalid exported YY: $1\n"; + } + if ($ARGV =~ /\.h$/) + { + while (/^(.*yy.*)$/gm) + { + print "$ARGV: invalid exported yy: $1\n"; + } + } +' -- *.hh *.h ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./headers.at:387: $CC $CFLAGS $CPPFLAGS -c -o c-only.o c-only.c ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./headers.at:387: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx-only.o cxx-only.cc ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test +./headers.at:387: $CXX $CXXFLAGS $CPPFLAGS $LDFLAGS c-only.o cxx-only.o -o c-and-cxx || + exit 77 stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stdout: +./headers.at:387: $PREPARSER ./c-and-cxx +stderr: +./headers.at:387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:392: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o parser x[1-9a-d].o -DCC_IS_CXX=$CC_IS_CXX main.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -408. types.at:139: ok - -416. types.at:139: testing lalr1.cc api.value.type=variant %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -13103,217 +13166,200 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -411. types.at:139: ok +402. types.at:139: ok -417. 
types.at:139: testing lalr1.cc api.value.type=variant api.token.constructor ... +418. types.at:139: testing lalr1.cc api.value.type=variant api.token.constructor %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -407. types.at:139: ok - +stdout: +./types.at:139: $PREPARSER ./test stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -410. types.at:139: ok -418. types.at:139: testing lalr1.cc api.value.type=variant api.token.constructor %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +399. types.at:139: ok 419. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant ... ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +401. types.at:139: ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -412. types.at:139: ok 420. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant %header ... ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +400. types.at:139: ok + stderr: +421. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor ... +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' stdout: ./types.at:139: ./check +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +405. 
types.at:139: ok ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' +./types.at:139: ./check + ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +410. types.at:139: 422. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor %header ... + ok ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: ./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + stderr: stdout: ./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +423. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor ... 
======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stdout: -./types.at:139: ./check -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS --std=c++03 not supported -======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stdout: stderr: +./types.at:139: $PREPARSER ./test stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: stdout: -./types.at:139: ./check --std=c++11 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +406. types.at:139: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +404. types.at:139: ok stdout: -./types.at:139: ./check --std=c++98 not supported + +./types.at:139: $PREPARSER ./test + +403. types.at:139: ok +424. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor %header ... ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test + +425. types.at:139: testing glr.cc api.value.type={double} ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +426. types.at:139: testing glr.cc api.value.type={double} %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +409. types.at:139: ok +407. 
types.at:139: ok + + +427. types.at:139: testing glr.cc api.value.type={variant} ... +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +428. types.at:139: testing glr.cc api.value.type={variant} %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./headers.at:394: $PREPARSER ./parser ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stderr: stdout: +stderr: +./headers.at:394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: ./check --std=c++11 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stdout: stderr: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: ./types.at:139: ./check +310. headers.at:199: ok + +429. types.at:139: testing glr.cc api.value.type={struct foo} ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +412. types.at:139: ok + +430. types.at:139: testing glr.cc api.value.type={struct foo} %header ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -413. types.at:139: ok - -421. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor ... -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -stdout: -./types.at:139: ./check +411. types.at:139: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -./types.at:139: ./check + +431. types.at:139: testing glr.cc api.value.type={struct bar} ... +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -13322,9 +13368,11 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +408. types.at:139: ok + +432. types.at:139: testing glr.cc api.value.type={struct bar} %header ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -13333,6 +13381,7 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -13340,7 +13389,6 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -13354,6 +13402,7 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -13363,393 +13412,277 @@ stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX 
$CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -stdout: ./types.at:139: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: stdout: ./types.at:139: ./check --std=c++11 not supported +-std=c++98 not supported +======== Testing with C++ standard flags: '' +-std=c++98 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: +./types.at:139: ./check stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: +-std=c++03 not supported +======== Testing with C++ standard flags: '' ./types.at:139: ./check +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +-std=c++03 not supported ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check +-std=c++11 not supported ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stdout: -./types.at:139: $PREPARSER ./test -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS 
-======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: stdout: ./types.at:139: ./check +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: ./types.at:139: $PREPARSER ./test +-std=c++11 not supported +======== Testing with C++ standard flags: '' stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: ./check stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +-std=c++98 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: stdout: ./types.at:139: ./check -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stderr: stdout: ./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' 
stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -414. types.at:139: ok - -422. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor %header ... ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: +stderr: +./types.at:139: $PREPARSER ./test +stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: ./check +-std=c++98 not supported ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -419. types.at:139: ok - -423. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor ... 
+-std=c++98 not supported ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: ./check +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +-std=c++11 not supported +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +-std=c++03 not supported ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' +-std=c++98 not supported +======== Testing with C++ standard flags: '' stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stdout: +./types.at:139: ./check stderr: stdout: ./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: ./check --std=c++03 not supported ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: ./check +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -std=c++11 not supported ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: ./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: ./check -std=c++03 not supported ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: ./types.at:139: ./check -std=c++11 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -417. types.at:139: ok - -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -424. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor %header ... 
======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check +-std=c++11 not supported ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -420. types.at:139: ok - -425. types.at:139: testing glr.cc api.value.type={double} ... +stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: 
./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' @@ -13759,91 +13692,34 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -stderr: -./types.at:139: $PREPARSER ./test -stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: -stdout: -stderr: -./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -418. types.at:139: ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -426. types.at:139: testing glr.cc api.value.type={double} %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -./types.at:139: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +413. types.at:139: ok ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: stderr: -stdout: + ./types.at:139: $PREPARSER ./test -./types.at:139: ./check --std=c++11 not supported -======== Testing with C++ standard flags: '' +stdout: stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -421. types.at:139: ok - -427. types.at:139: testing glr.cc api.value.type={variant} ... 
+./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +433. types.at:139: testing glr.cc api.value.type={union foo} ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: @@ -13852,9 +13728,11 @@ stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -415. types.at:139: ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +414. types.at:139: ok -428. types.at:139: testing glr.cc api.value.type={variant} %header ... +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +434. types.at:139: testing glr.cc api.value.type={union foo} %header ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -13872,6 +13750,10 @@ stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: @@ -13887,226 +13769,196 @@ stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -416. types.at:139: ok - -429. types.at:139: testing glr.cc api.value.type={struct foo} ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test 
+./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $PREPARSER ./test +stdout: +./types.at:139: $PREPARSER ./test stderr: stdout: +stderr: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -stdout: -./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stdout: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +======== Testing with C++ standard flags: '' ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' -422. types.at:139: ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -430. 
types.at:139: testing glr.cc api.value.type={struct foo} %header ... -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: ./check stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: ./check stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stdout: -./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +417. types.at:139: ok stderr: stdout: ./types.at:139: $PREPARSER ./test + stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +435. types.at:139: testing glr.cc %union { float fval; int ival; }; ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +415. types.at:139: ok + +436. 
types.at:139: testing glr.cc %union { float fval; int ival; }; %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: stderr: -./types.at:139: $PREPARSER ./test stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stderr: +stdout: +./types.at:139: ./check stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -423. types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +416. types.at:139: ok -431. types.at:139: testing glr.cc api.value.type={struct bar} ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./types.at:139: $PREPARSER ./test +437. types.at:139: testing glr.cc api.value.type=union ... 
======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: $PREPARSER ./test stdout: +stderr: ./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14117,19 +13969,17 @@ ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14137,15 +13987,10 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc 
$LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14155,6 +14000,7 @@ ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14176,12 +14022,8 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -424. types.at:139: ok - -432. types.at:139: testing glr.cc api.value.type={struct bar} %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14190,6 +14032,7 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14202,17 +14045,11 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: ./types.at:139: $PREPARSER ./test stderr: @@ -14220,49 +14057,46 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check 
check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -425. types.at:139: ok - +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: stderr: stdout: +stdout: ./types.at:139: $PREPARSER ./test -433. types.at:139: testing glr.cc api.value.type={union foo} ... +./types.at:139: ./check stderr: -======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: @@ -14270,14 +14104,16 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: @@ -14286,37 +14122,41 @@ stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +stderr: stdout: ./types.at:139: $PREPARSER ./test -stderr: -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ======== Testing 
with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -14339,81 +14179,52 @@ stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -428. types.at:139: ok -426. types.at:139: ok - - -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -434. types.at:139: testing glr.cc api.value.type={union foo} %header ... +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -435. types.at:139: testing glr.cc %union { float fval; int ival; }; ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -427. types.at:139: ok - stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -436. types.at:139: testing glr.cc %union { float fval; int ival; }; %header ... 
-======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stderr: stdout: +./types.at:139: ./check +stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -14425,15 +14236,18 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14442,6 +14256,7 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14449,17 +14264,15 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -429. types.at:139: ok - -437. types.at:139: testing glr.cc api.value.type=union ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14468,54 +14281,57 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: stdout: +stderr: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stderr: +stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export 
NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -14528,10 +14344,11 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14539,15 +14356,13 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -14560,42 +14375,36 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -430. types.at:139: ok - -438. types.at:139: testing glr.cc api.value.type=union %header ... 
-======== Testing with C++ standard flags: '' +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y @@ -14605,32 +14414,19 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -431. types.at:139: ok - -439. types.at:139: testing glr2.cc api.value.type={double} ... -======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +420. types.at:139: ok + +438. types.at:139: testing glr.cc api.value.type=union %header ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14639,6 +14435,7 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14646,25 +14443,17 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: stdout: -stderr: ./types.at:139: $PREPARSER ./test stderr: stderr: stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y @@ -14678,368 +14467,250 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -432. types.at:139: ok +419. types.at:139: ok -440. types.at:139: testing glr2.cc api.value.type={double} %header ... +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +439. types.at:139: testing glr2.cc api.value.type={double} ... 
======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -stdout: ./types.at:139: ./check -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stderr: -stdout: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test -stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +422. 
types.at:139: ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check --std=c++98 not supported +./types.at:139: $PREPARSER ./test +440. types.at:139: testing glr2.cc api.value.type={double} %header ... ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -stdout: -./types.at:139: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +424. types.at:139: ok + stderr: stdout: ./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +441. types.at:139: testing glr2.cc api.value.type={variant} ... ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -stdout: -./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test +421. types.at:139: ok stderr: stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -433. types.at:139: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./types.at:139: $PREPARSER ./test -stderr: -stderr: stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: +./types.at:139: ./check -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -441. types.at:139: testing glr2.cc api.value.type={variant} ... +442. types.at:139: testing glr2.cc api.value.type={variant} %header ... 
======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -434. 
types.at:139: ok -stderr: -stdout: - -./types.at:139: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -442. types.at:139: testing glr2.cc api.value.type={variant} %header ... ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -435. types.at:139: ok +423. types.at:139: ok 443. types.at:139: testing glr2.cc api.value.type={struct foo} ... 
======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: -./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stdout: -./types.at:139: ./check --std=c++03 not supported +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -436. types.at:139: ok -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - -444. types.at:139: testing glr2.cc api.value.type={struct foo} %header ... 
======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -437. types.at:139: ok - +418. types.at:139: ok stderr: stdout: + ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -445. types.at:139: testing glr2.cc api.value.type={struct bar} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +444. types.at:139: testing glr2.cc api.value.type={struct foo} %header ... ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check -stderr: --std=c++03 not supported -======== Testing with C++ standard flags: '' -stdout: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +425. types.at:139: ok + +445. types.at:139: testing glr2.cc api.value.type={struct bar} ... 
======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $PREPARSER ./test stderr: stdout: +stderr: ./types.at:139: ./check +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +428. types.at:139: ok + stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +446. types.at:139: testing glr2.cc api.value.type={struct bar} %header ... +======== Testing with C++ standard flags: '' ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check @@ -15050,227 +14721,184 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +429. types.at:139: ok + stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check --std=c++03 not supported +447. types.at:139: testing glr2.cc api.value.type={union foo} ... 
======== Testing with C++ standard flags: '' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check --std=c++03 not supported ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stdout: ./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -438. types.at:139: ok - -446. types.at:139: testing glr2.cc api.value.type={struct bar} %header ... +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: 6 reduce/reduce conflicts [-Wconflicts-rr] +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] + First example . c A A $end + First reduce derivation $accept -> [ a -> [ b -> [ . ] d -> [ c A A ] ] $end ] + Second example . c A A $end + Second reduce derivation $accept -> [ a -> [ c -> [ . ] d -> [ c A A ] ] $end ] +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 6.000000 + First example b . c A A $end + First reduce derivation $accept -> [ a -> [ b d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ] + Second example b . A $end + Second reduce derivation $accept -> [ a -> [ b d -> [ c -> [ . ] A ] ] $end ] +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 6.000000 + First example c . c A A $end + First reduce derivation $accept -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ] + Second example c . A $end + Second reduce derivation $accept -> [ a -> [ c d -> [ c -> [ . ] A ] ] $end ] +input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 6.000000 + First example b c . A + Shift derivation a -> [ b d -> [ c . A ] ] + Second example b c . c A A $end + Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] ] ] $end ] +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] + First example b c . c A A $end + First reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . 
] d -> [ c A A ] ] ] ] ] ] $end ] + Second example b c . A $end + Second reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ] +input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] + First example b c . A + Shift derivation a -> [ b d -> [ c . A ] ] + Second example b c . A $end + Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ] +input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] + Example b d . + First reduce derivation a -> [ b d . ] + Second reduce derivation a -> [ b d -> [ d . ] ] +input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] + Example c d . + First reduce derivation a -> [ c d . ] + Second reduce derivation a -> [ c d -> [ d . ] ] +input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] +input.y:6.15: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +427. types.at:139: ok +270. counterexample.at:610: ok stderr: stdout: ./types.at:139: ./check --std=c++98 not supported + + +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +448. types.at:139: testing glr2.cc api.value.type={union foo} %header ... ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check --std=c++03 not supported +449. types.at:139: testing glr2.cc %union { float fval; int ival; }; ... 
======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: stderr: stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: stderr: stdout: -stdout: ./types.at:139: ./check ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS 
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +426. types.at:139: ok + +450. types.at:139: testing glr2.cc %union { float fval; int ival; }; %header ... +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +431. types.at:139: ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + ok + stderr: stdout: -./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $PREPARSER ./test +451. types.at:139: testing glr2.cc api.value.type=union ... stderr: -stdout: -./types.at:139: ./check --std=c++03 not supported ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +430. types.at:139: ok + +452. types.at:139: testing glr2.cc api.value.type=union %header ... 
======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check +-std=c++98 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check +-std=c++03 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check @@ -15285,79 +14913,62 @@ ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check +-std=c++98 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +432. 
types.at:139: ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check +453. types.at:377: testing lalr1.cc: Named %union ... +-std=c++03 not supported ======== Testing with C++ standard flags: '' +./types.at:377: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +453. types.at:377: ok + stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: stdout: +./types.at:139: ./check ./types.at:139: $PREPARSER ./test +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +454. types.at:377: testing glr.cc: Named %union ... +./types.at:377: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -439. types.at:139: ok - -447. types.at:139: testing glr2.cc api.value.type={union foo} ... ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +454. types.at:377: ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + +455. scanner.at:326: testing Token numbers: yacc.c ... 
+./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -15365,103 +14976,74 @@ stderr: stdout: ./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -stdout: stderr: stdout: +stdout: ./types.at:139: ./check ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +-std=c++98 not supported +======== Testing with C++ standard flags: '' +-std=c++98 not supported ======== Testing with C++ standard flags: '' -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: stderr: stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check +-std=c++03 not supported ======== Testing with C++ standard flags: '' +./types.at:139: ./check +-std=c++03 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc 
$LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: ./check stdout: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c +./scanner.at:326: $PREPARSER ./input stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -440. types.at:139: ok +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +455. scanner.at:326: ok -448. types.at:139: testing glr2.cc api.value.type={union foo} %header ... -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +456. scanner.at:326: testing Token numbers: yacc.c api.token.raw ... 
+./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -15469,6 +15051,7 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: ./types.at:139: ./check @@ -15483,32 +15066,16 @@ ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -15519,61 +15086,33 @@ ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -stderr: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stdout: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $PREPARSER ./test +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c +./scanner.at:326: $PREPARSER ./input stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -441. types.at:139: ok +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +456. scanner.at:326: ok + stderr: stdout: ./types.at:139: ./check -std=c++98 not supported ======== Testing with C++ standard flags: '' - -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -449. types.at:139: testing glr2.cc %union { float fval; int ival; }; ... 
-======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +457. scanner.at:326: testing Token numbers: glr.c ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: ./types.at:139: ./check -std=c++03 not supported ======== Testing with C++ standard flags: '' -stderr: -stdout: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stderr: -stdout: -./types.at:139: ./check ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: @@ -15583,71 +15122,45 @@ stderr: stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +-std=c++98 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -stdout: -stderr: -./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -444. types.at:139: ok -443. types.at:139: ok - - -450. types.at:139: testing glr2.cc %union { float fval; int ival; }; %header ... -======== Testing with C++ standard flags: '' -451. types.at:139: testing glr2.cc api.value.type=union ... 
======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +433. types.at:139: ok + +458. scanner.at:326: testing Token numbers: glr.c api.token.raw ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: stdout: -======== Testing with C++ standard flags: '' ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: ./types.at:139: ./check @@ -15656,181 +15169,140 @@ ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $PREPARSER ./test stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +434. types.at:139: ok + stderr: +459. scanner.at:326: testing Token numbers: lalr1.cc ... 
+./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stdout: ./types.at:139: ./check -std=c++03 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -442. types.at:139: ok - -452. types.at:139: testing glr2.cc api.value.type=union %header ... ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c +./scanner.at:326: $PREPARSER ./input stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +457. scanner.at:326: ok + +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +460. scanner.at:326: testing Token numbers: lalr1.cc api.token.raw ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -445. types.at:139: ok - -453. types.at:377: testing lalr1.cc: Named %union ... -./types.at:377: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -453. types.at:377: ok - -454. types.at:377: testing glr.cc: Named %union ... -./types.at:377: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -454. 
types.at:377: ok - -455. scanner.at:326: testing Token numbers: yacc.c ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: +./types.at:139: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test +stderr: +======== Testing with C++ standard flags: '' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' stderr: +stdout: ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./types.at:139: $PREPARSER ./test +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check -std=c++98 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check --std=c++03 not supported +-std=c++98 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -stderr: ./types.at:139: ./check -stdout: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +-std=c++03 not supported ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c -./scanner.at:326: $PREPARSER ./input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -455. 
scanner.at:326: ok -stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: ./check - --std=c++98 not supported +-std=c++03 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -456. scanner.at:326: testing Token numbers: yacc.c api.token.raw ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: stdout: ./types.at:139: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c -./scanner.at:326: $PREPARSER ./input -stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -456. scanner.at:326: ok - -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -457. scanner.at:326: testing Token numbers: glr.c ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -446. types.at:139: ok - -458. scanner.at:326: testing Token numbers: glr.c api.token.raw ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +======== Testing with C++ standard flags: '' stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +435. types.at:139: ok + +461. scanner.at:326: testing Token numbers: glr.cc ... 
+./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: ./types.at:139: ./check @@ -15839,268 +15311,254 @@ ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' -stderr: -stdout: -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./scanner.at:326: $PREPARSER ./input -stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -457. scanner.at:326: ok - -459. scanner.at:326: testing Token numbers: lalr1.cc ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: -stdout: ./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c -./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./scanner.at:326: $PREPARSER ./input stderr: ./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: 458. scanner.at:326: ok -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -460. scanner.at:326: testing Token numbers: lalr1.cc api.token.raw ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: +462. scanner.at:326: testing Token numbers: glr.cc api.token.raw ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check +-std=c++03 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc -./scanner.at:326: $PREPARSER ./input -stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -459. scanner.at:326: ok - -461. scanner.at:326: testing Token numbers: glr.cc ... 
-./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc -./scanner.at:326: $PREPARSER ./input -stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -460. scanner.at:326: ok - -462. scanner.at:326: testing Token numbers: glr.cc api.token.raw ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./types.at:139: $PREPARSER ./test stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: stderr: stdout: -stdout: -./types.at:139: $PREPARSER ./test stderr: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +-std=c++98 not supported ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export 
NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $PREPARSER ./test +stderr: ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +======== Testing with C++ standard flags: '' stdout: ./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc +stderr: +stdout: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./scanner.at:326: $PREPARSER ./input +./types.at:139: ./check stderr: ./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -461. scanner.at:326: ok +-std=c++98 not supported +======== Testing with C++ standard flags: '' +459. scanner.at:326: ok +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y 463. scanner.at:326: testing Token numbers: glr2.cc ... ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./scanner.at:326: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: +stderr: +stdout: +./types.at:139: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stdout: ./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc ./scanner.at:326: $PREPARSER ./input +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -462. scanner.at:326: ok - -464. scanner.at:326: testing Token numbers: glr2.cc api.token.raw ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./scanner.at:326: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +460. scanner.at:326: ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./scanner.at:326: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +464. scanner.at:326: testing Token numbers: glr2.cc api.token.raw ... 
+./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./scanner.at:326: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +436. types.at:139: ok +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -stderr: -./types.at:139: $PREPARSER ./test +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc stderr: stdout: -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' +./types.at:139: $PREPARSER ./test ./scanner.at:326: $PREPARSER ./input stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -463. scanner.at:326: ok +461. scanner.at:326: ok +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 465. scanner.at:326: testing Token numbers: lalr1.d ... 
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.d input.y + +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y 465. scanner.at:326: skipped (scanner.at:326) - 466. scanner.at:326: testing Token numbers: lalr1.d api.token.raw ... +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.d input.y +./scanner.at:326: $PREPARSER ./input stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -466. scanner.at:326: skipped (scanner.at:326) +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 467. scanner.at:326: testing Token numbers: lalr1.java ... +stderr: +462. scanner.at:326: ok +stdout: +./types.at:139: $PREPARSER ./test ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.java input.y stderr: stdout: +stderr: + ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -467. scanner.at:326: skipped (scanner.at:326) +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +466. scanner.at:326: ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + skipped (scanner.at:326) -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +437. types.at:139: ok +stderr: +stdout: 468. scanner.at:326: testing Token numbers: lalr1.java api.token.raw ... ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.java input.y -468. scanner.at:326: skipped (scanner.at:326) +./types.at:139: $PREPARSER ./test +stderr: 469. scanner.at:330: testing Token numbers: lalr1.cc api.token.raw api.value.type=variant api.token.constructor ... +467. scanner.at:326: skipped (scanner.at:326) ./scanner.at:330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./scanner.at:330: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: + +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +470. calc.at:1334: testing Calculator parse.trace ... 
+./calc.at:1334: mv calc.y.tmp calc.y + +./calc.at:1334: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +468. scanner.at:326: skipped (scanner.at:326) ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc -./scanner.at:326: $PREPARSER ./input -stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -464. scanner.at:326: ok +471. calc.at:1336: testing Calculator %header ... +./calc.at:1336: mv calc.y.tmp calc.y + +./calc.at:1336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./scanner.at:330: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./calc.at:1334: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +472. calc.at:1337: testing Calculator %debug %locations ... +./calc.at:1337: mv calc.y.tmp calc.y + +./calc.at:1336: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -470. calc.at:1334: testing Calculator parse.trace ... -./calc.at:1334: mv calc.y.tmp calc.y - -./calc.at:1334: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1337: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1334: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./calc.at:1334: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +stderr: +stdout: ./calc.at:1334: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -16112,7 +15570,19 @@ || /\t/ )' calc.c +./calc.at:1336: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' input: +./calc.at:1336: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c calc.h + | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -16127,6 +15597,23 @@ | 2^2^3 = 256 | (2^2)^3 = 64 ./calc.at:1334: $PREPARSER ./calc input +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1336: $PREPARSER ./calc input +stderr: +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -17145,7 +16632,9 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) +stderr: ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: Starting parse Entering state 0 @@ -18167,8 +17656,14 @@ ./calc.at:1334: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: | 1 2 +./calc.at:1336: $PREPARSER ./calc input +input: + | 1 2 ./calc.at:1334: $PREPARSER ./calc input stderr: +syntax error +stderr: +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -18191,6 +17686,8 @@ Stack now 0 ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error +stderr: Starting parse Entering state 0 Stack now 0 @@ -18211,6 +17708,16 @@ Stack now 0 Cleanup: discarding lookahead token "number" (1.1: 2) Stack now 0 +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -18221,11 +17728,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1336: cat stderr ./calc.at:1334: cat stderr input: +input: + | 1//2 +./calc.at:1336: $PREPARSER ./calc input | 1//2 ./calc.at:1334: $PREPARSER ./calc input stderr: +syntax error +stderr: +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -18255,6 +17769,8 @@ Stack now 0 ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +syntax error Starting parse Entering state 0 Stack now 0 @@ -18282,6 +17798,16 @@ Stack now 0 Cleanup: discarding lookahead token '/' (1.1: ) Stack now 0 +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -18293,10 +17819,14 @@ }eg ' expout || exit 77 ./calc.at:1334: cat stderr +./calc.at:1336: cat stderr +input: input: | error ./calc.at:1334: $PREPARSER ./calc input + | error stderr: +./calc.at:1336: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -18307,6 +17837,9 @@ Stack now 0 ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -18315,6 +17848,8 @@ syntax error Cleanup: discarding lookahead token "invalid token" (1.1: ) Stack now 0 +stderr: +syntax error ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -18325,15 +17860,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1334: cat stderr -stderr: -stdout: +./calc.at:1336: cat stderr input: | 1 = 2 = 3 -./scanner.at:330: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc ./calc.at:1334: $PREPARSER ./calc input +input: stderr: -./scanner.at:330: $PREPARSER ./input + | 1 = 2 = 3 +./calc.at:1336: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -18375,6 +17920,8 @@ Stack now 0 ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -18415,7 +17962,8 @@ Stack now 0 Cleanup: discarding lookahead token '=' (1.1: ) Stack now 0 -./scanner.at:330: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -18426,12 +17974,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -469. scanner.at:330: ok +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1334: cat stderr input: +./calc.at:1336: cat stderr | | +1 - ./calc.at:1334: $PREPARSER ./calc input stderr: Starting parse @@ -18460,6 +18017,11 @@ Cleanup: discarding lookahead token '+' (1.1: ) Stack now 0 ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | + | +1 +./calc.at:1336: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 @@ -18486,6 +18048,26 @@ Stack now 0 Cleanup: discarding lookahead token '+' (1.1: ) Stack now 0 +syntax error +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +======== Testing with C++ standard flags: '' ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -18496,8 +18078,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1336: cat stderr ./calc.at:1334: cat stderr ./calc.at:1334: $PREPARSER ./calc /dev/null +./calc.at:1336: $PREPARSER ./calc /dev/null +stderr: stderr: Starting parse Entering state 0 @@ -18507,11 +18093,12 @@ syntax error Cleanup: discarding lookahead token "end of input" (1.1: ) Stack now 0 +syntax error +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -471. calc.at:1336: testing Calculator %header ... stderr: -./calc.at:1336: mv calc.y.tmp calc.y - +stderr: +syntax error Starting parse Entering state 0 Stack now 0 @@ -18520,7 +18107,19 @@ syntax error Cleanup: discarding lookahead token "end of input" (1.1: ) Stack now 0 -./calc.at:1336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +stdout: +./calc.at:1337: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -18531,10 +18130,47 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1337: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 ./calc.at:1334: cat stderr +./calc.at:1336: cat stderr +./calc.at:1337: $PREPARSER ./calc input input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +input: ./calc.at:1334: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1336: $PREPARSER ./calc input +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -18853,168 +18489,1193 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) +stderr: ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): +Stack now 0 1 +Reducing stack by rule 5 (line 88): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 -Stack now 0 8 21 4 12 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 -Stack now 0 8 21 4 12 21 30 +Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> 
$$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token ')' (1.1: ) -syntax error -Error: popping token '+' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 3) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 
6 +Stack now 0 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Stack now 0 6 8 21 +Reading a token +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 107): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + 
$1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 124): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 125): + $1 = token 
'(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 124): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a 
token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 106): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 106): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 
88): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 20 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 20 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 106): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 125): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 106): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp 
(10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 124): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 124): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input 
(1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 124): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 125): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 124): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (14.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Error: popping token '+' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 3) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) 
+Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) Stack now 0 8 21 4 Shifting token error (1.1: ) Entering state 11 @@ -19172,6 +19833,1038 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack 
now 0 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Stack now 0 6 8 21 +Reading a token +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 107): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = 
nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 124): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 125): + $1 = token '(' 
(5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 124): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token 
+Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 106): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 106): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 
= token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 123): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 20 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 20 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 106): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 125): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 106): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 
2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 124): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 124): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: 
) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 124): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 125): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 124): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (14.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1337: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 2 +./calc.at:1337: $PREPARSER ./calc input ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -19182,10 +20875,76 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.3: 2) +Stack now 0 +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: cat stderr +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.3: 2) +Stack now 0 ./calc.at:1334: cat stderr +stdout: +./types.at:139: ./check +input: input: | (!!) + (1 2) = 1 +./calc.at:1336: $PREPARSER ./calc input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + | (!!) + (1 2) = 1 +stderr: +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1334: $PREPARSER ./calc input +syntax error +error: 2222 != 1 +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 @@ -19328,6 +21087,8 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +error: 2222 != 1 stderr: Starting parse Entering state 0 @@ -19469,6 +21230,78 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) +./calc.at:1337: cat stderr +input: + | 1//2 +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1337: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1336: cat stderr ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -19479,12 +21312,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1336: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (- *) + (1 2) = 1 +./calc.at:1336: $PREPARSER ./calc input ./calc.at:1334: cat stderr +stderr: +./calc.at:1337: cat stderr +syntax error +syntax error +error: 2222 != 1 +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +stderr: +syntax error +syntax error +error: 2222 != 1 | (- *) + (1 2) = 1 +input: ./calc.at:1334: $PREPARSER ./calc input + | error stderr: +./calc.at:1337: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -19639,6 +21497,16 @@ Entering state 0 Stack now 0 Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 +stderr: +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 @@ -19783,6 +21651,36 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -19793,11 +21691,65 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1336: cat stderr +./calc.at:1337: cat stderr ./calc.at:1334: cat stderr input: +input: +input: + | 1 = 2 = 3 + | (* *) + (*) + (*) +./calc.at:1336: $PREPARSER ./calc input +./calc.at:1337: $PREPARSER ./calc input | (* *) + (*) + (*) ./calc.at:1334: $PREPARSER ./calc input stderr: +stderr: +syntax error +syntax error +syntax error +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +stderr: +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -19951,6 +21903,50 @@ Cleanup: popping nterm input (1.1: ) ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding 
lookahead token '=' (1.7: ) +Stack now 0 +stderr: +syntax error +syntax error +syntax error Starting parse Entering state 0 Stack now 0 @@ -20102,6 +22098,26 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -20112,10 +22128,74 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1337: cat stderr +./calc.at:1336: cat stderr +input: ./calc.at:1334: cat stderr + | + | +1 +./calc.at:1337: $PREPARSER ./calc input +input: +stderr: input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 + !+ ++ ./calc.at:1334: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1336: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 stderr: Starting parse Entering state 0 @@ -20199,6 +22279,8 @@ Cleanup: popping nterm exp (1.1: 7) ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1336: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -20280,10 +22362,26 @@ Cleanup: popping token '+' (1.1: ) Cleanup: popping nterm exp (1.1: 7) ./calc.at:1334: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 + 2 * 3 + !- ++ input: +./calc.at:1336: $PREPARSER ./calc input +stderr: | 1 + 2 * 3 + !- ++ +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1334: $PREPARSER ./calc input stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -20366,6 +22464,7 @@ Cleanup: popping nterm exp (1.1: 7) ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1337: cat stderr Starting parse Entering state 0 Stack now 0 @@ -20446,6 +22545,18 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.1: ) Cleanup: popping nterm exp (1.1: 7) +./calc.at:1337: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -20456,11 +22567,47 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +./calc.at:1336: cat stderr ./calc.at:1334: cat stderr +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1336: $PREPARSER ./calc input input: | 1 + 2 * 3 + !* ++ ./calc.at:1334: $PREPARSER ./calc input stderr: +./calc.at:1337: cat stderr +memory exhausted +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -20544,8 +22691,11 @@ Cleanup: popping nterm exp (1.1: 7) ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: +memory exhausted + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1337: $PREPARSER ./calc input stderr: -stdout: Starting parse Entering state 0 Stack now 0 @@ -20627,10 +22777,326 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.1: ) Cleanup: popping nterm exp (1.1: 7) -./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1334: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 
+Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error +Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 107): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' 
(1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error +Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -20640,13 +23106,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1334: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1334: $PREPARSER ./calc input stderr: +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -20656,119 +23126,334 @@ Entering state 4 Stack now 0 4 Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) +Next token is token ')' (1.2: ) +1.2: syntax error +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): +Reducing stack by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 Stack now 0 8 21 4 Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error +Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" 
(1.1: ) -Error: popping token error (1.1: ) +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Stack now 0 8 21 4 -Shifting token error (1.1: ) +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a 
token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 107): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error +Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token "end of input" (1.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1334: cat stderr +./calc.at:1336: cat stderr +input: +input: + | (#) + (#) = 2222 +./calc.at:1334: $PREPARSER ./calc input +./calc.at:1337: cat stderr + | (#) + (#) = 2222 +./calc.at:1336: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 @@ -20891,21 +23576,17 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1334: cat stderr input: - | (1 + #) = 1111 -./calc.at:1334: $PREPARSER ./calc input +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: + | (!!) 
+ (1 2) = 1 +./calc.at:1337: $PREPARSER ./calc input +syntax error: invalid character: '#' +syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -20915,26 +23596,7 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 @@ -20958,127 +23620,69 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 +Stack now 0 8 21 4 Reading a token syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 Shifting token error (1.1: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 Next token is token "invalid token" (1.1: ) Error: discarding token "invalid token" (1.1: ) Error: popping token error (1.1: ) -Stack now 0 4 +Stack now 0 8 21 4 Shifting token error (1.1: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 Reading a token Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) Entering state 26 -Stack now 0 4 11 26 +Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) $2 = token error (1.1: ) $3 = token ')' (1.1: ) -> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Reading a token Next token is token '=' (1.1: ) Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.1: ) Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) + $1 = nterm exp (1.1: 2222) $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 Next token is token '\n' (1.1: ) @@ -21086,7 +23690,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) + $1 = nterm exp (1.1: 2222) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -21104,21 +23708,6 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1334: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1334: $PREPARSER ./calc input -stderr: stderr: Starting parse Entering state 0 @@ -21129,96 +23718,139 @@ Entering state 4 Stack now 0 4 Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 128): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) Stack now 0 4 -Shifting token error (1.1: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): +Reducing stack by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 Reading a token -Next 
token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (1.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: ./check Starting parse Entering state 0 Stack now 0 @@ -21228,93 +23860,147 @@ Entering state 4 Stack now 0 4 Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 128): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error (1.1: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): +Reducing stack by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line 
(1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (1.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -21325,11 +24011,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1334: cat stderr +./calc.at:1336: cat stderr +./calc.at:1337: cat stderr input: - | (1 + # + 1) = 1111 + | (1 + #) = 1111 ./calc.at:1334: $PREPARSER ./calc input +input: stderr: + | (- *) + (1 2) = 1 Starting parse Entering state 0 Stack now 0 @@ -21370,22 +24070,6 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) Entering state 26 @@ -21444,7 +24128,12 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) +./calc.at:1337: $PREPARSER ./calc input +input: ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +stderr: +./calc.at:1336: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -21486,22 +24175,6 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) Entering state 26 @@ -21560,22 +24233,6 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token 
\[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1334: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1334: $PREPARSER ./calc input -stderr: Starting parse Entering state 0 Stack now 0 @@ -21585,139 +24242,146 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 4 12 21 30 +Next token is token '*' (1.4: ) +1.4: syntax error +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 127): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 118): +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 8 21 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 -Stack now 0 8 23 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token "number" (1.12: 
2) +1.12: syntax error +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 -Stack now 0 8 23 32 +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token "end of input" (1.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -21728,316 +24392,149 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '*' (1.4: ) +1.4: syntax error +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 127): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 118): +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 8 21 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 -Stack now 0 8 23 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is 
token "number" (1.12: 2) +1.12: syntax error +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 -Stack now 0 8 23 32 +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token "end of input" (1.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1334: cat stderr -470. calc.at:1334: ok - -472. calc.at:1337: testing Calculator %debug %locations ... -stderr: -./calc.at:1337: mv calc.y.tmp calc.y - -stdout: -./calc.at:1336: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1336: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1336: $PREPARSER ./calc input -stderr: -stderr: -stdout: -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -stderr: -./calc.at:1336: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 2 -./calc.at:1336: $PREPARSER ./calc input -======== Testing with C++ standard flags: '' -stderr: -syntax error -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -syntax error -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1336: cat stderr -stderr: -stdout: -./types.at:139: $PREPARSER ./test -input: - | 1//2 -stderr: -./calc.at:1336: $PREPARSER ./calc input -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -447. types.at:139: ok -syntax error -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error - -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1337: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1336: cat stderr -input: - | error -./calc.at:1336: $PREPARSER ./calc input -stderr: -stderr: -stdout: -syntax error -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -syntax error -473. 
calc.at:1338: testing Calculator %locations api.location.type={Span} ... -./calc.at:1338: mv calc.y.tmp calc.y - -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1336: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1336: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1336: cat stderr -input: - | - | +1 -./calc.at:1336: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -syntax error -./calc.at:1338: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1336: cat stderr -./calc.at:1336: $PREPARSER ./calc /dev/null +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: -syntax error +syntax error: invalid character: '#' ./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1336: "$PERL" -pi -e 'use strict; +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -22047,24 +24544,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1336: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1336: $PREPARSER ./calc input -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1336: "$PERL" -pi -e 'use strict; +./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -22074,17 +24554,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1336: cat stderr -input: - | (!!) 
+ (1 2) = 1 -./calc.at:1336: $PREPARSER ./calc input -stderr: -syntax error -error: 2222 != 1 -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -error: 2222 != 1 +syntax error: invalid character: '#' +./calc.at:1334: cat stderr +./calc.at:1337: cat stderr ./calc.at:1336: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -22095,68 +24568,514 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1336: cat stderr input: - | (- *) + (1 2) = 1 -./calc.at:1336: $PREPARSER ./calc input -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1336: cat stderr input: | (* *) + (*) + (*) -./calc.at:1336: $PREPARSER ./calc input +./calc.at:1337: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1334: $PREPARSER ./calc input stderr: -syntax error -syntax error -syntax error -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 
8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -syntax error -syntax error -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1336: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1336: $PREPARSER ./calc input stderr: -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 
105): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: -./calc.at:1336: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) input: - | 1 + 2 * 3 + !- ++ -./calc.at:1336: $PREPARSER ./calc input -stderr: -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1336: "$PERL" -pi -e 'use strict; + | (# + 1) = 1111 +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -22166,72 +25085,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -./calc.at:1338: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1336: cat stderr -input: - | 1 + 2 * 3 + !* ++ ./calc.at:1336: $PREPARSER ./calc input stderr: -memory exhausted +syntax error: invalid character: '#' ./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -memory exhausted -./calc.at:1338: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -input: -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1338: $PREPARSER ./calc input -stderr: -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1338: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | 1 2 -./calc.at:1338: $PREPARSER ./calc input -stderr: -1.3: syntax error -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: cat stderr -stderr: -1.3: syntax error -input: - | (#) + (#) = 2222 -./calc.at:1336: $PREPARSER ./calc input -stderr: -stdout: -./calc.at:1338: "$PERL" -pi -e 'use strict; +./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -22242,77 +25100,12 @@ }eg ' expout || exit 77 stderr: -./calc.at:1337: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1338: cat stderr -./calc.at:1337: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: +./calc.at:1337: cat stderr input: -./calc.at:1336: cat stderr - | 1//2 -./calc.at:1338: $PREPARSER ./calc input - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 + | 1 + 2 * 3 + !+ ++ ./calc.at:1337: $PREPARSER ./calc input -stderr: -1.3: syntax error -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + #) = 1111 -stderr: -1.3: syntax error -./calc.at:1336: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' +./calc.at:1334: cat stderr stderr: Starting parse Entering state 0 @@ -22358,7 +25151,7 @@ Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 107): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) @@ -22366,7 +25159,7 @@ -> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 105): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) @@ -22374,965 +25167,794 @@ -> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 129): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: + | (1 + # + 1) = 1111 +./calc.at:1334: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 -Stack now 0 6 8 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 -Stack now 0 6 8 21 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 -Stack now 0 6 8 21 30 +Stack now 0 8 21 30 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 -Stack now 0 6 8 21 30 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 -Stack now 0 6 8 21 30 22 2 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 107): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 105): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: 
) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 129): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1337: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting 
token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 124): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = 
token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1336: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1337: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 125): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 +Stack now 0 4 12 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 124): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" 
(5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Stack now 0 6 8 19 28 +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 +Stack now 0 8 Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Stack now 0 8 19 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' 
(7.10-8.0: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 -Stack now 0 6 8 20 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 106): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 -Stack now 0 6 8 20 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 106): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 130): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1336: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 +Stack now 0 8 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Stack now 0 6 8 20 4 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 -Stack now 0 6 8 20 4 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Stack now 0 6 8 20 4 12 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 -Stack now 0 6 8 20 4 12 20 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 106): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is 
token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 125): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 106): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 130): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +syntax error: invalid character: '#' +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error: invalid character: '#' +./calc.at:1337: cat stderr +./calc.at:1334: cat stderr +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1337: $PREPARSER ./calc input +input: + | (1 + 1) / (1 - 1) +./calc.at:1334: $PREPARSER ./calc input +stderr: +./calc.at:1336: cat stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 -Stack now 0 6 8 24 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 -Stack now 0 6 8 24 33 24 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 124): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 124): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 
21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Stack now 0 6 8 19 28 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 131): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 4 +Stack now 0 4 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 12 -Stack now 0 6 4 12 +Stack now 0 4 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 4 12 21 30 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 124): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp 
(13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token ')' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) +Stack now 0 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 125): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 124): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Stack now 0 8 23 4 12 20 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token ')' (1.1: ) +Reducing stack by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) 
Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (14.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -23377,7 +25999,7 @@ Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 107): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) @@ -23385,7 +26007,7 @@ -> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 105): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) @@ -23393,36 +26015,318 @@ -> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 131): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +input: + | (1 + 1) / (1 - 1) +./calc.at:1336: $PREPARSER ./calc input +' expout || exit 77 +stderr: +error: null divisor +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: null divisor +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1337: cat stderr +input: + | (#) + (#) = 2222 +./calc.at:1337: $PREPARSER ./calc input +./calc.at:1334: cat stderr +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.8: ) +Error: discarding token "invalid token" (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) + $1 = token 
"number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.14-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-9: 2222) $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -23432,927 +26336,1131 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +470. calc.at:1334: ok +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 21 -Stack now 0 6 8 21 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Stack now 0 6 8 21 30 +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) -Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 
-Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 107): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.8: ) +Error: discarding token "invalid token" (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 105): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 +Stack now 0 8 19 Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 -Stack now 0 6 8 19 2 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) 
-Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1336: cat stderr +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +471. calc.at:1336: ok + +stderr: +stdout: +./calc.at:1337: cat stderr +./types.at:139: $PREPARSER ./test + +stderr: +input: + | (1 + #) = 1111 +./calc.at:1337: $PREPARSER ./calc input +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 -Stack now 0 6 2 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 124): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack 
now 0 6 8 19 +Stack now 0 8 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 -Stack now 0 6 8 19 2 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 -Stack now 0 6 4 2 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 125): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 +Stack now 0 4 12 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 124): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) + $1 = token 
"number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (5.11-6.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1337: cat stderr +473. calc.at:1338: testing Calculator %locations api.location.type={Span} ... +./calc.at:1338: mv calc.y.tmp calc.y + +input: +./calc.at:1338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + | (# + 1) = 1111 +./calc.at:1337: $PREPARSER ./calc input +474. calc.at:1340: testing Calculator %name-prefix "calc" ... 
+./calc.at:1340: mv calc.y.tmp calc.y + +./calc.at:1340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 -Stack now 0 6 8 19 2 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' 
(7.10-8.0: ) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 106): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 106): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 -Stack now 0 6 8 19 2 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 123): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp 
(1.11-14: 1111) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1337: cat stderr +./calc.at:1338: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +input: + | (1 + # + 1) = 1111 +./calc.at:1337: $PREPARSER ./calc input +./calc.at:1340: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 8 20 4 +Stack now 0 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 -Stack now 0 6 8 20 4 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Stack now 0 6 8 20 4 12 +Stack now 0 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) -Entering state 1 -Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 106): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 125): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 106): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error 
(1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (10.16-11.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 -Stack now 0 6 8 24 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 124): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 124): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 19 1 Reducing 
stack by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (12.12-13.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1337: cat stderr +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + | (1 + 1) / (1 - 1) +./calc.at:1337: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 4 +Stack now 0 4 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 -Stack now 0 6 4 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Stack now 0 6 4 12 +Stack now 0 4 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 -Stack now 0 6 4 12 24 1 +Stack now 0 4 12 21 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 124): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 27 -Stack now 0 6 4 12 27 +Stack now 0 4 12 27 Reducing stack by rule 13 (line 125): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 -Stack now 0 6 8 24 1 +Stack now 0 8 23 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 124): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp 
(13.1-7: 64) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 106): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 125): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 108): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 -Stack now 0 6 8 19 28 + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 105): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 125): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 106): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 125): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 
10 (line 108): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 +Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (14.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1337: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1338: cat stderr -./calc.at:1336: "$PERL" -pi -e 'use strict; +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -24362,64 +27470,122 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./calc.at:1337: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +472. calc.at:1337: ok + +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +475. calc.at:1341: testing Calculator %verbose ... +./calc.at:1341: mv calc.y.tmp calc.y + +./calc.at:1341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1341: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./scanner.at:330: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc +./scanner.at:330: $PREPARSER ./input +stderr: +./scanner.at:330: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./calc.at:1338: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +469. 
scanner.at:330: ok +./calc.at:1338: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +stderr: +stdout: +./calc.at:1340: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' input: -input: - | error + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 ./calc.at:1338: $PREPARSER ./calc input - | 1 2 +./calc.at:1340: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + stderr: -./calc.at:1337: $PREPARSER ./calc input -./calc.at:1336: cat stderr -1.1: syntax error ./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) -Stack now 0 -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1340: $PREPARSER ./calc input stderr: -1.1: syntax error +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (# + 1) = 1111 + | 1 2 +./calc.at:1338: $PREPARSER ./calc input +stderr: stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token "number" (1.3: 2) 1.3: syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) -Stack now 0 -./calc.at:1336: $PREPARSER ./calc input +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +stderr: +1.3: syntax error +stdout: +./types.at:139: $PREPARSER ./test + +input: +stderr: ./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -24430,8 +27596,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 2 +./calc.at:1340: $PREPARSER ./calc input +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1337: "$PERL" -pi -e 'use strict; +syntax error +======== Testing with C++ standard flags: '' +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: cat stderr +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +syntax error +input: + | 1//2 +./calc.at:1338: $PREPARSER ./calc input +stderr: +1.3: 
syntax error +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -24441,13 +27622,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' -./calc.at:1338: cat stderr -./calc.at:1337: cat stderr -./calc.at:1336: "$PERL" -pi -e 'use strict; +1.3: syntax error +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -24457,81 +27635,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: +./calc.at:1340: cat stderr +476. calc.at:1342: testing Calculator %yacc ... +./calc.at:1342: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: | 1//2 -./calc.at:1337: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1338: $PREPARSER ./calc input +./calc.at:1340: $PREPARSER ./calc input +./calc.at:1338: cat stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1336: cat stderr -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | error +./calc.at:1338: $PREPARSER ./calc input stderr: +syntax error stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -1.7: syntax error +1.1: syntax error ./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1336: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1337: "$PERL" -pi -e 'use strict; +1.1: syntax error +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -24541,9 +27673,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: 
-1.7: syntax error -syntax error: invalid character: '#' ./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -24554,8 +27683,29 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1337: cat stderr -./calc.at:1336: "$PERL" -pi -e 'use strict; +./calc.at:1340: cat stderr +./calc.at:1338: cat stderr +input: + | error +./calc.at:1340: $PREPARSER ./calc input +stderr: +syntax error +input: + | 1 = 2 = 3 +./calc.at:1338: $PREPARSER ./calc input +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +1.7: syntax error +stdout: +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +1.7: syntax error +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -24565,36 +27715,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1338: cat stderr - | error -./calc.at:1337: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: cat stderr -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 - | - | +1 -./calc.at:1338: $PREPARSER ./calc input -stderr: -./calc.at:1337: "$PERL" -pi -e 'use strict; +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -24604,19 +27725,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1338: cat stderr +input: +./calc.at:1340: cat stderr +./calc.at:1342: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS + | + | +1 +./calc.at:1338: $PREPARSER ./calc input input: +stderr: + | 1 = 2 = 3 +./calc.at:1340: $PREPARSER ./calc input 2.1: syntax error ./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./calc.at:1336: $PREPARSER ./calc input stderr: stderr: -error: null divisor +syntax error +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 2.1: syntax error -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1337: cat stderr stderr: -error: null divisor +syntax error ./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -24627,11 +27755,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 = 2 = 3 -./calc.at:1337: $PREPARSER ./calc input -stderr: -./calc.at:1336: "$PERL" -pi -e 'use strict; +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -24641,95 +27765,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 
1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1338: cat stderr ./calc.at:1338: $PREPARSER ./calc /dev/null -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 +./calc.at:1340: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: 1.1: syntax error ./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: cat stderr stderr: +stderr: +stdout: 1.1: syntax error -./calc.at:1337: "$PERL" -pi -e 'use strict; +./types.at:139: $PREPARSER ./test +input: + | + | +1 +./calc.at:1340: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -24739,7 +27795,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1338: "$PERL" -pi -e 'use strict; +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +stderr: +======== Testing with C++ standard flags: '' +stdout: +./calc.at:1341: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1341: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + +./calc.at:1338: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1338: $PREPARSER ./calc input +input: +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -24749,13 +27829,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -471. calc.at:1336: ok -./calc.at:1338: cat stderr - -./calc.at:1337: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1338: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1341: $PREPARSER ./calc input stderr: 1.2: syntax error 1.18: syntax error @@ -24763,37 +27850,8 @@ 1.41: syntax error 1.1-46: error: 4444 != 1 ./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | - | +1 -./calc.at:1337: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.2: syntax error 1.18: syntax error @@ -24801,31 +27859,9 @@ 1.41: syntax error 1.1-46: error: 4444 != 1 stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 +./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1340: cat stderr +./calc.at:1340: $PREPARSER ./calc /dev/null ./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -24836,7 +27872,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1337: "$PERL" -pi -e 'use strict; +stderr: +input: +syntax error +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 2 +./calc.at:1341: $PREPARSER ./calc input +stderr: +syntax error +stderr: +syntax error +./calc.at:1338: cat stderr +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -24846,40 +27896,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 
-474. calc.at:1340: testing Calculator %name-prefix "calc" ... -./calc.at:1340: mv calc.y.tmp calc.y - -./calc.at:1340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1338: cat stderr -./calc.at:1337: cat stderr input: | (!!) + (1 2) = 1 ./calc.at:1338: $PREPARSER ./calc input -./calc.at:1337: $PREPARSER ./calc /dev/null -stderr: stderr: 1.11: syntax error 1.1-16: error: 2222 != 1 ./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -./calc.at:1337: "$PERL" -pi -e 'use strict; +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -24892,7 +27916,8 @@ stderr: 1.11: syntax error 1.1-16: error: 2222 != 1 -./calc.at:1337: cat stderr +./calc.at:1340: cat stderr +./calc.at:1341: cat stderr ./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -24905,650 +27930,52 @@ ' expout || exit 77 input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1337: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Stack now 0 8 21 4 
12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token 
"number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 107): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: $PREPARSER ./calc input +input: ./calc.at:1338: cat stderr + | 1//2 +./calc.at:1341: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-2: 1111) - 
$2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 107): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 
-Reducing stack by rule 14 (line 126): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: | (- *) + (1 2) = 1 ./calc.at:1338: $PREPARSER ./calc input stderr: -./calc.at:1337: "$PERL" -pi -e 'use strict; +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -25558,15 +27985,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1340: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 +stdout: +./types.at:139: ./check ./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -25577,307 +27998,41 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1337: cat stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1341: cat stderr +./calc.at:1340: cat stderr ./calc.at:1338: cat stderr input: input: + | error +./calc.at:1341: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +input: +./calc.at:1340: $PREPARSER ./calc input | (* *) + (*) + (*) ./calc.at:1338: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -./calc.at:1337: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 128): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +error: 2222 != 1 +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.2: syntax error 1.10: syntax error 1.16: syntax error ./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +stderr: +syntax error 1.2: syntax error 1.10: syntax error 1.16: syntax error -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 128): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 
= nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1337: "$PERL" -pi -e 'use strict; +syntax error +error: 2222 != 1 +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -25887,7 +28042,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1338: "$PERL" -pi -e 'use strict; +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -25897,325 +28052,51 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1337: cat stderr -stderr: +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1338: cat stderr +./calc.at:1341: cat stderr +./calc.at:1340: cat stderr input: -stdout: + | 1 + 2 * 3 + !+ ++ +./calc.at:1338: $PREPARSER ./calc input +input: +input: +stderr: + | 1 = 2 = 3 +./calc.at:1341: $PREPARSER ./calc input | (- *) + (1 2) = 1 -./calc.at:1337: $PREPARSER ./calc input -./types.at:139: $PREPARSER ./test -./calc.at:1338: cat stderr +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: $PREPARSER ./calc input stderr: +syntax error +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 127): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token 
'(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +error: 2222 != 1 stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 127): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ 
= nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -448. types.at:139: input: - ok +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1338: $PREPARSER ./calc input -stdout: +./calc.at:1338: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error stderr: -./types.at:139: $PREPARSER ./test -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1337: "$PERL" -pi -e 'use strict; +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +syntax error +syntax error +error: 2222 != 1 +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1338: $PREPARSER ./calc input +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -26225,329 +28106,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -stderr: -stderr: -./calc.at:1338: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -input: -./calc.at:1337: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | 1 + 2 * 3 + !- ++ -./calc.at:1338: $PREPARSER ./calc input -input: stderr: ./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -./calc.at:1337: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' 
(1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is 
token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1337: "$PERL" -pi -e 'use strict; +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -26557,6 +28118,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./calc.at:1341: cat stderr +./calc.at:1340: cat stderr +input: ./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -26567,273 +28132,46 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -475. calc.at:1341: testing Calculator %verbose ... -./calc.at:1341: mv calc.y.tmp calc.y - -./calc.at:1341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1337: cat stderr -./calc.at:1338: cat stderr + | + | +1 +./calc.at:1341: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 + 2 * 3 + !+ ++ +stderr: +./calc.at:1338: cat stderr + | (* *) + (*) + (*) +syntax error +./calc.at:1340: $PREPARSER ./calc input +stderr: +syntax error +syntax error +syntax error +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1337: $PREPARSER ./calc input | 1 + 2 * 3 + !* ++ ./calc.at:1338: $PREPARSER ./calc input stderr: -1.14: memory exhausted +syntax error +syntax error +syntax error stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) 
-Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 129): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.14: memory exhausted ./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 129): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1337: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: +./calc.at:1341: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: 1.14: memory exhausted - | 1 + 2 * 3 + !- ++ -./calc.at:1337: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 130): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: "$PERL" -pi -e 'use strict; +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -26843,91 +28181,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 130): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -stdout: -./types.at:139: ./check -./calc.at:1337: "$PERL" -pi -e 'use strict; +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -26937,21 +28191,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1341: cat stderr +./calc.at:1341: $PREPARSER ./calc /dev/null +./calc.at:1340: cat stderr +stderr: +syntax error +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1338: cat stderr input: +stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1340: $PREPARSER ./calc input +syntax error +stderr: +input: +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (#) + (#) = 2222 ./calc.at:1338: $PREPARSER ./calc input -./calc.at:1341: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stderr: -./calc.at:1337: cat stderr +stderr: +./calc.at:1340: $EGREP -c -v 'Return for a new token:|LAC:' stderr 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' ./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1338: "$PERL" -pi -e 'use strict; +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -26961,182 +28225,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' input: - | 1 + 2 * 3 + !* ++ -./calc.at:1337: $PREPARSER ./calc input -./calc.at:1338: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 
8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 131): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | (1 + #) = 1111 -./calc.at:1338: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +./calc.at:1340: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 131): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -1.6: syntax error: invalid character: '#' -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1337: "$PERL" -pi -e 'use strict; +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27147,10 +28243,10 @@ }eg ' expout || exit 77 stderr: -1.6: syntax error: invalid character: '#' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1337: cat stderr -./calc.at:1338: "$PERL" -pi -e 'use strict; +./calc.at:1341: cat stderr +./calc.at:1338: cat stderr +input: +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27160,264 +28256,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 input: -./calc.at:1338: cat stderr - | (#) + (#) = 2222 -./calc.at:1337: $PREPARSER ./calc input -input: +./calc.at:1341: $PREPARSER ./calc input stderr: - | (# + 1) = 1111 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.8: ) -Error: discarding token "invalid token" (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp 
(1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 ./calc.at:1338: $PREPARSER ./calc input +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.8: ) -Error: discarding token "invalid token" (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Stack now 0 8 19 
1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1337: "$PERL" -pi -e 'use strict; +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1340: cat stderr +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27427,6 +28287,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1340: $PREPARSER ./calc input +stderr: +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +memory exhausted +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +memory exhausted +1.6: syntax error: invalid character: '#' +./calc.at:1341: cat stderr +stderr: +stdout: ./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -27437,119 +28311,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1337: cat stderr -./calc.at:1338: cat stderr -input: - | (1 + #) = 1111 -input: -./calc.at:1337: $PREPARSER ./calc input -stderr: -stdout: - | (1 + # + 1) = 1111 -./calc.at:1340: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -stderr: -./calc.at:1338: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = 
token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1340: "$PERL" -ne ' +./calc.at:1342: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1342: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -27560,121 +28323,8 @@ || /\t/ )' calc.c -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) 
-Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -1.6: syntax error: invalid character: '#' input: -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1338: "$PERL" -pi -e 'use strict; +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27684,6 +28334,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (!!) + (1 2) = 1 +./calc.at:1341: $PREPARSER ./calc input +stderr: +syntax error +error: 2222 != 1 +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: cat stderr +input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -27697,226 +28355,30 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1340: $PREPARSER ./calc input +./calc.at:1342: $PREPARSER ./calc input stderr: -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1337: cat stderr +./calc.at:1340: cat stderr +syntax error +error: 2222 != 1 stderr: -./calc.at:1338: cat stderr -./calc.at:1340: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: | (# + 1) = 1111 -./calc.at:1337: $PREPARSER ./calc input -input: - | 1 2 -./calc.at:1340: $PREPARSER ./calc input +./calc.at:1338: $PREPARSER ./calc input stderr: - | (1 + 1) / (1 - 1) stderr: -./calc.at:1338: $PREPARSER ./calc input -syntax error -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token +input: +./calc.at:1342: $EGREP -c -v 'Return for a new token:|LAC:' stderr 1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Error: popping 
token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11-17: error: null divisor + | (#) + (#) = 2222 +./calc.at:1340: $PREPARSER ./calc input ./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.11-14: 1111) 
-Shifting token "number" (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -1.11-17: error: null divisor -./calc.at:1337: "$PERL" -pi -e 'use strict; +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27926,7 +28388,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: "$PERL" -pi -e 'use strict; +1.2: syntax error: invalid character: '#' +stderr: +input: +syntax error: invalid character: '#' +syntax error: invalid character: '#' + | 1 2 +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27936,7 +28404,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1338: "$PERL" -pi -e 'use strict; +./calc.at:1342: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: cat stderr +input: +stderr: +syntax error +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27946,264 +28422,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: cat stderr -./calc.at:1337: cat stderr + | (- *) + (1 2) = 1 +./calc.at:1341: $PREPARSER ./calc input ./calc.at:1338: cat stderr -input: - | 1//2 -./calc.at:1340: $PREPARSER ./calc input stderr: input: syntax error -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1337: $PREPARSER ./calc input -stderr: -473. 
calc.at:1338: ok syntax error +error: 2222 != 1 +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: cat stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1337: "$PERL" -pi -e 'use strict; +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28213,325 +28443,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: cat stderr -input: - | error -./calc.at:1340: $PREPARSER ./calc input -stderr: + | (1 + # + 1) = 1111 +./calc.at:1338: $PREPARSER ./calc input syntax error -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1337: cat stderr -stderr: syntax error -input: - | (1 + 1) / (1 - 1) -./calc.at:1337: $PREPARSER ./calc input -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +error: 2222 != 1 stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 125): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 106): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) 
-Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 125): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 108): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1340: cat stderr +1.6: syntax error: invalid character: '#' +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 105): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 125): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 
-Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 106): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 125): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 108): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | 1 = 2 = 3 + | (1 + #) = 1111 ./calc.at:1340: $PREPARSER ./calc input stderr: -syntax error -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./calc.at:1342: cat stderr stderr: -476. calc.at:1342: testing Calculator %yacc ... -syntax error -./calc.at:1337: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28541,15 +28471,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1342: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -./calc.at:1340: "$PERL" -pi -e 'use strict; +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28559,20 +28481,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1337: cat stderr -472. 
calc.at:1337: ok -./calc.at:1340: cat stderr - -input: - | - | +1 -./calc.at:1340: $PREPARSER ./calc input + | 1//2 +./calc.at:1342: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' stderr: syntax error -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: syntax error +./calc.at:1341: cat stderr +./calc.at:1338: cat stderr ./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28583,19 +28502,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: cat stderr -./calc.at:1342: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1340: $PREPARSER ./calc /dev/null -477. calc.at:1343: testing Calculator parse.error=detailed ... -./calc.at:1343: mv calc.y.tmp calc.y - -stderr: -syntax error -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +input: +input: + | (1 + 1) / (1 - 1) +./calc.at:1338: $PREPARSER ./calc input stderr: -syntax error -./calc.at:1340: "$PERL" -pi -e 'use strict; +1.11-17: error: null divisor + | (* *) + (*) + (*) +./calc.at:1341: $PREPARSER ./calc input +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28605,39 +28521,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: cat stderr -input: -stderr: -stdout: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1340: $PREPARSER ./calc input -./calc.at:1341: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' stderr: syntax error syntax error syntax error -syntax error -error: 4444 != 1 -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: cat stderr stderr: +stderr: +1.11-17: error: null divisor syntax error syntax error syntax error -syntax error -error: 4444 != 1 -./calc.at:1341: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - input: -./calc.at:1340: "$PERL" -pi -e 'use strict; + | (# + 1) = 1111 +./calc.at:1340: $PREPARSER ./calc input +./calc.at:1342: cat stderr +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28647,54 +28547,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1341: $PREPARSER ./calc input -stderr: -./calc.at:1340: cat stderr -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | (!!) 
+ (1 2) = 1 -./calc.at:1340: $PREPARSER ./calc input stderr: -syntax error -error: 2222 != 1 +syntax error: invalid character: '#' ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: input: -syntax error -error: 2222 != 1 - | 1 2 -./calc.at:1341: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS + | error +./calc.at:1342: $PREPARSER ./calc input stderr: -syntax error -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28705,25 +28564,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: cat stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1340: $PREPARSER ./calc input -stderr: -./calc.at:1341: cat stderr -syntax error -syntax error -error: 2222 != 1 -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' stderr: +./calc.at:1338: cat stderr syntax error -syntax error -error: 2222 != 1 -input: - | 1//2 -./calc.at:1341: $PREPARSER ./calc input +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: syntax error +473. calc.at:1338: ok +./calc.at:1341: cat stderr ./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28734,11 +28583,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1340: cat stderr -./calc.at:1341: "$PERL" -pi -e 'use strict; +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1341: $PREPARSER ./calc input +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28748,26 +28596,42 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./calc.at:1340: cat stderr +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +stderr: input: - | (* *) + (*) + (*) +./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1342: cat stderr +stderr: + | (1 + # + 1) = 1111 ./calc.at:1340: $PREPARSER ./calc input +stdout: +./types.at:139: $PREPARSER ./test stderr: -syntax error -syntax error -syntax error +syntax error: invalid character: '#' +input: ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: cat stderr -stderr: -syntax error -syntax error -syntax error input: - | error + | 1 + 2 * 3 + !- ++ + | 1 = 2 = 3 +./calc.at:1342: $PREPARSER ./calc input +stderr: ./calc.at:1341: $PREPARSER ./calc input stderr: +syntax error: invalid character: '#' +stderr: +stderr: syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +======== Testing with C++ standard flags: '' +stderr: +syntax error +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o 
check check.cc $LIBS ./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28778,8 +28642,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -./calc.at:1341: "$PERL" -pi -e 'use strict; +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28789,24 +28652,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: cat stderr -./calc.at:1341: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1340: $PREPARSER ./calc input -input: -stderr: - | 1 = 2 = 3 -./calc.at:1341: $PREPARSER ./calc input -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -stderr: -./calc.at:1340: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: ./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28817,16 +28662,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !- ++ +./calc.at:1340: cat stderr +input: + | (1 + 1) / (1 - 1) ./calc.at:1340: $PREPARSER ./calc input +./calc.at:1342: cat stderr +./calc.at:1341: cat stderr stderr: +error: null divisor ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +477. calc.at:1343: testing Calculator parse.error=detailed ... +./calc.at:1343: mv calc.y.tmp calc.y + +input: stderr: -./calc.at:1341: cat stderr +./calc.at:1343: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: + | 1 + 2 * 3 + !* ++ +error: null divisor +./calc.at:1341: $PREPARSER ./calc input | | +1 -./calc.at:1341: $PREPARSER ./calc input +./calc.at:1342: $PREPARSER ./calc input +stderr: +memory exhausted +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +memory exhausted +stderr: +syntax error ./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28837,18 +28704,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1340: cat stderr -syntax error -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1340: $PREPARSER ./calc input -stderr: -memory exhausted -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28859,10 +28714,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -memory exhausted -./calc.at:1341: cat stderr -./calc.at:1340: "$PERL" -pi -e 'use strict; +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28872,21 +28724,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1341: $PREPARSER ./calc /dev/null +./calc.at:1341: cat stderr +./calc.at:1342: cat stderr ./calc.at:1340: cat stderr +input: +./calc.at:1342: $PREPARSER ./calc /dev/null + | (#) + (#) = 2222 +./calc.at:1341: $PREPARSER ./calc input +stderr: stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' syntax error 
+./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (#) + (#) = 2222 -./calc.at:1340: $PREPARSER ./calc input +474. calc.at:1340: ok stderr: syntax error stderr: syntax error: invalid character: '#' syntax error: invalid character: '#' -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: "$PERL" -pi -e 'use strict; + +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28897,11 +28756,33 @@ }eg ' expout || exit 77 stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' +stdout: +./calc.at:1341: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1342: cat stderr ./calc.at:1341: cat stderr input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1342: $PREPARSER ./calc input +stderr: +input: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 ./calc.at:1341: $PREPARSER ./calc input stderr: syntax error @@ -28909,8 +28790,14 @@ syntax error syntax error error: 4444 != 1 +stderr: +./calc.at:1343: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +syntax error: invalid character: '#' ./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1340: "$PERL" -pi -e 'use strict; +stderr: +syntax error: invalid character: '#' +478. calc.at:1344: testing Calculator parse.error=verbose ... +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28920,13 +28807,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1340: cat stderr +./calc.at:1344: mv calc.y.tmp calc.y + ./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28937,40 +28819,29 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (1 + #) = 1111 -./calc.at:1340: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1342: cat stderr ./calc.at:1341: cat stderr -stderr: -syntax error: invalid character: '#' input: -stderr: -stdout: +input: | (!!) 
+ (1 2) = 1 +./calc.at:1342: $PREPARSER ./calc input +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + | (# + 1) = 1111 ./calc.at:1341: $PREPARSER ./calc input -./calc.at:1342: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' stderr: +stderr: +syntax error: invalid character: '#' syntax error error: 2222 != 1 ./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' stderr: syntax error error: 2222 != 1 -./calc.at:1342: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -./calc.at:1340: "$PERL" -pi -e 'use strict; +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28980,21 +28851,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: cat stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 ./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29005,28 +28861,36 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1342: cat stderr +./calc.at:1341: cat stderr +input: + | (- *) + (1 2) = 1 ./calc.at:1342: $PREPARSER ./calc input stderr: -input: +stderr: +syntax error +syntax error +error: 2222 != 1 ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stdout: + | (1 + # + 1) = 1111 +./types.at:139: $PREPARSER ./test +./calc.at:1341: $PREPARSER ./calc input stderr: - | (# + 1) = 1111 -./calc.at:1340: $PREPARSER ./calc input -./calc.at:1342: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1341: cat stderr stderr: +syntax error +syntax error +error: 2222 != 1 syntax error: invalid character: '#' -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 2 +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' -./calc.at:1342: $PREPARSER ./calc input stderr: -input: -syntax error -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1340: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29036,28 +28900,46 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (- *) + (1 2) = 1 -./calc.at:1341: $PREPARSER ./calc input +./calc.at:1341: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1344: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: +stdout: +./calc.at:1342: cat stderr +./types.at:139: $PREPARSER ./test +input: +./calc.at:1341: cat stderr stderr: +input: + | (* *) + (*) + (*) +./calc.at:1342: $PREPARSER ./calc input stderr: syntax error syntax error -error: 2222 != 1 +syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./calc.at:1341: $PREPARSER ./calc input +stderr: +error: null divisor +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +======== Testing with C++ standard flags: '' +stderr: syntax error -./calc.at:1340: cat stderr syntax error syntax error -error: 2222 != 1 -input: - | (1 + # + 1) = 1111 -./calc.at:1340: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' +error: null divisor +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29079,7 +28961,24 @@ }eg ' expout || exit 77 ./calc.at:1342: cat stderr -./calc.at:1340: "$PERL" -pi -e 'use strict; +./calc.at:1341: cat stderr +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1342: $PREPARSER ./calc input +475. calc.at:1341: ok +stderr: +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1342: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: + + | 1 + 2 * 3 + !- ++ +./calc.at:1342: $PREPARSER ./calc input +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29089,36 +28988,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +stdout: +./calc.at:1342: cat stderr +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +479. calc.at:1346: testing Calculator api.pure=full %locations ... 
+./calc.at:1346: mv calc.y.tmp calc.y + input: -./calc.at:1341: cat stderr - | 1//2 +./calc.at:1346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + | 1 + 2 * 3 + !* ++ ./calc.at:1342: $PREPARSER ./calc input stderr: -syntax error +memory exhausted ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1340: cat stderr - | (* *) + (*) + (*) -./calc.at:1341: $PREPARSER ./calc input -stderr: -stderr: -syntax error -syntax error -syntax error -syntax error -input: -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./calc.at:1340: $PREPARSER ./calc input -stderr: -stderr: -error: null divisor -syntax error -syntax error -syntax error -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -error: null divisor +memory exhausted ./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29129,7 +29015,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1341: "$PERL" -pi -e 'use strict; +./calc.at:1342: cat stderr +input: + | (#) + (#) = 2222 +./calc.at:1342: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29139,7 +29037,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: "$PERL" -pi -e 'use strict; +./calc.at:1342: cat stderr +input: + | (1 + #) = 1111 +./calc.at:1342: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: +syntax error: invalid character: '#' +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29151,21 +29059,13 @@ ' expout || exit 77 ./calc.at:1342: cat stderr input: - | error + | (# + 1) = 1111 ./calc.at:1342: $PREPARSER ./calc input -./calc.at:1341: cat stderr stderr: -syntax error +syntax error: invalid character: '#' ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1341: $PREPARSER ./calc input -syntax error -stderr: -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error: invalid character: '#' ./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29176,22 +29076,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1340: cat stderr -./calc.at:1341: $PREPARSER ./calc input -stderr: -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./calc.at:1342: cat stderr input: -474. 
calc.at:1340: ok - | 1 = 2 = 3 + | (1 + # + 1) = 1111 ./calc.at:1342: $PREPARSER ./calc input stderr: -syntax error +syntax error: invalid character: '#' ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: "$PERL" -pi -e 'use strict; +stderr: +syntax error: invalid character: '#' +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29201,12 +29095,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1342: cat stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1342: $PREPARSER ./calc input stderr: -syntax error - -./calc.at:1341: cat stderr +error: null divisor +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: +error: null divisor ./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29217,13 +29114,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !* ++ -./calc.at:1341: $PREPARSER ./calc input -stdout: +./calc.at:1342: cat stderr +476. calc.at:1342: ok + stderr: +stdout: ./calc.at:1343: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -memory exhausted -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1343: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -29235,10 +29131,6 @@ || /\t/ )' calc.c -./calc.at:1342: cat stderr -stderr: -memory exhausted -input: input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -29254,67 +29146,22 @@ | 2^2^3 = 256 | (2^2)^3 = 64 ./calc.at:1343: $PREPARSER ./calc input - | - | +1 -./calc.at:1342: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -478. calc.at:1344: testing Calculator parse.error=verbose ... stderr: -syntax error ./calc.at:1343: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1344: mv calc.y.tmp calc.y - -./calc.at:1344: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: | 1 2 ./calc.at:1343: $PREPARSER ./calc input -./calc.at:1341: cat stderr stderr: -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 syntax error, unexpected number ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +480. calc.at:1347: testing Calculator api.push-pull=both api.pure=full %locations ... 
stderr: -input: - | (#) + (#) = 2222 +./calc.at:1347: mv calc.y.tmp calc.y + syntax error, unexpected number -./calc.at:1341: $PREPARSER ./calc input -./calc.at:1342: cat stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: $PREPARSER ./calc /dev/null -stderr: -stderr: -syntax error -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1347: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y ./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29325,44 +29172,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error ./calc.at:1343: cat stderr -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 input: | 1//2 ./calc.at:1343: $PREPARSER ./calc input stderr: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: cat stderr stderr: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -input: - | (1 + #) = 1111 -./calc.at:1341: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -stderr: -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29373,56 +29191,55 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -./types.at:139: $PREPARSER ./test stderr: -syntax error: invalid character: '#' +stdout: +./calc.at:1344: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' ./calc.at:1343: cat stderr -stderr: -./calc.at:1342: cat stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + input: +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1344: $PREPARSER ./calc input | error ./calc.at:1343: $PREPARSER ./calc input -./calc.at:1344: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -input: -======== Testing with C++ standard flags: '' +stderr: stderr: syntax error, unexpected invalid token -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1342: $PREPARSER ./calc input +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1347: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error, unexpected invalid token -stderr: -./calc.at:1341: cat stderr -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 +./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: - | (# + 1) = 1111 -./calc.at:1341: $PREPARSER ./calc input + | 1 2 +./calc.at:1344: $PREPARSER ./calc input stderr: +syntax error, unexpected number +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29433,28 +29250,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: ./calc.at:1343: cat stderr -syntax error: invalid character: '#' +syntax error, unexpected number input: | 1 = 2 = 3 -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1343: $PREPARSER ./calc input stderr: syntax error, unexpected '=' ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: "$PERL" -pi -e 'use strict; +stderr: +stdout: +stderr: +syntax error, unexpected '=' +./types.at:139: $PREPARSER ./test +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29465,15 +29275,13 @@ }eg ' expout || exit 77 stderr: -syntax error, unexpected '=' -./calc.at:1342: cat stderr -./calc.at:1341: cat stderr -input: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: cat stderr +======== Testing with C++ standard flags: '' input: - | (1 + # + 1) = 1111 - | (!!) + (1 2) = 1 -./calc.at:1341: $PREPARSER ./calc input -./calc.at:1342: $PREPARSER ./calc input +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | 1//2 +./calc.at:1344: $PREPARSER ./calc input ./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29485,18 +29293,11 @@ }eg ' expout || exit 77 stderr: -stderr: -syntax error: invalid character: '#' -syntax error -error: 2222 != 1 -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1343: cat stderr stderr: -syntax error: invalid character: '#' -stderr: -syntax error -error: 2222 != 1 +syntax error, unexpected '/', expecting number or '-' or '(' or '!' input: | | +1 @@ -29505,18 +29306,22 @@ syntax error, unexpected '+' ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stdout: +./calc.at:1346: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +stderr: +./calc.at:1346: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + syntax error, unexpected '+' -./calc.at:1342: "$PERL" -pi -e 'use strict; +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29526,8 +29331,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1341: cat stderr +./calc.at:1344: cat stderr +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1346: $PREPARSER ./calc input ./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29538,35 +29357,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -./calc.at:1342: cat stderr -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: input: +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1344: $PREPARSER ./calc input ./calc.at:1343: cat stderr - | (1 + 1) / (1 - 1) +stderr: +stderr: +syntax error, unexpected invalid token +./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1343: $PREPARSER ./calc /dev/null -input: -./calc.at:1341: $PREPARSER ./calc input - | (- *) + (1 2) = 1 stderr: stderr: -./calc.at:1342: $PREPARSER ./calc input -error: null divisor -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error, unexpected end of file +syntax error, unexpected invalid token ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -syntax error -syntax error -error: 2222 != 1 -stderr: -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: null divisor + | 1 2 +./calc.at:1346: $PREPARSER ./calc input syntax error, unexpected end of file stderr: -./calc.at:1343: "$PERL" -pi -e 'use strict; +stdout: +stderr: +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc +1.3: syntax error +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29576,10 
+29395,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -syntax error -error: 2222 != 1 -./calc.at:1341: "$PERL" -pi -e 'use strict; +stderr: +./scanner.at:326: $PREPARSER ./input +1.3: syntax error +stderr: +stdout: +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29589,10 +29410,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1344: cat stderr +stderr: +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc ./calc.at:1343: cat stderr input: -./calc.at:1341: cat stderr -./calc.at:1342: "$PERL" -pi -e 'use strict; +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./scanner.at:326: $PREPARSER ./input + | 1 = 2 = 3 +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29602,27 +29428,45 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1344: $PREPARSER ./calc input +stderr: +stderr: +464. scanner.at:326: ok +syntax error, unexpected '=' +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1343: $PREPARSER ./calc input -475. calc.at:1341: stderr: +stderr: +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected '=' +./calc.at:1346: cat stderr syntax error, unexpected ')', expecting number or '-' or '(' or '!' syntax error, unexpected ')', expecting number or '-' or '(' or '!' syntax error, unexpected '*', expecting number or '-' or '(' or '!' syntax error, unexpected '*', expecting number or '-' or '(' or '!' error: 4444 != 1 - ok ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1342: cat stderr +463. scanner.at:326: ok +stdout: +stderr: + +./types.at:139: ./check +input: syntax error, unexpected ')', expecting number or '-' or '(' or '!' syntax error, unexpected ')', expecting number or '-' or '(' or '!' syntax error, unexpected '*', expecting number or '-' or '(' or '!' syntax error, unexpected '*', expecting number or '-' or '(' or '!' error: 4444 != 1 - -input: - | (* *) + (*) + (*) -./calc.at:1343: "$PERL" -pi -e 'use strict; +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + | 1//2 +./calc.at:1346: $PREPARSER ./calc input +stderr: +1.3: syntax error +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29632,29 +29476,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1342: $PREPARSER ./calc input -./calc.at:1343: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -syntax error -syntax error -syntax error -input: -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) 
+ (1 2) = 1 -stderr: -./calc.at:1343: $PREPARSER ./calc input stderr: -syntax error -syntax error -syntax error -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1342: "$PERL" -pi -e 'use strict; +1.3: syntax error +./calc.at:1344: cat stderr +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29664,7 +29489,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1343: "$PERL" -pi -e 'use strict; + +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29674,38 +29500,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -479. calc.at:1346: testing Calculator api.pure=full %locations ... -./calc.at:1346: mv calc.y.tmp calc.y - -./calc.at:1342: cat stderr -./calc.at:1343: cat stderr -./calc.at:1346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1342: $PREPARSER ./calc input + | + | +1 +./calc.at:1344: $PREPARSER ./calc input +./calc.at:1343: cat stderr input: -stderr: - | (- *) + (1 2) = 1 + | (!!) + (1 2) = 1 ./calc.at:1343: $PREPARSER ./calc input -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '+' +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: cat stderr +stderr: syntax error, unexpected number error: 2222 != 1 -stderr: ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '+' +stderr: syntax error, unexpected number error: 2222 != 1 +481. calc.at:1348: testing Calculator parse.error=detailed %locations ... input: - | 1 + 2 * 3 + !- ++ -./calc.at:1342: $PREPARSER ./calc input -stderr: -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: mv calc.y.tmp calc.y + + | error +./calc.at:1346: $PREPARSER ./calc input +./calc.at:1348: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -./calc.at:1343: "$PERL" -pi -e 'use strict; +1.1: syntax error +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29715,11 +29541,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1343: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1343: $PREPARSER ./calc input -./calc.at:1342: "$PERL" -pi -e 'use strict; +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29730,19 +29552,29 @@ }eg ' expout || exit 77 stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1342: cat stderr +./calc.at:1343: cat stderr +482. calc.at:1350: testing Calculator parse.error=detailed %locations %header api.prefix={calc} %verbose %yacc ... +./calc.at:1350: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1344: cat stderr input: - | 1 + 2 * 3 + !* ++ -./calc.at:1342: $PREPARSER ./calc input -./calc.at:1343: "$PERL" -pi -e 'use strict; +./calc.at:1344: $PREPARSER ./calc /dev/null +1.1: syntax error + | (- *) + (1 2) = 1 +./calc.at:1343: $PREPARSER ./calc input +./calc.at:1350: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +syntax error, unexpected end of input +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29752,21 +29584,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +syntax error, unexpected end of input stderr: -memory exhausted -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: cat stderr -stderr: -memory exhausted -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1343: $PREPARSER ./calc input -stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: cat stderr stderr: -./calc.at:1346: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1343: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1342: "$PERL" -pi -e 'use strict; +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29776,23 +29605,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1344: cat stderr input: - | 1 + 2 * 3 + !- ++ -./calc.at:1343: $PREPARSER ./calc input -stderr: -./calc.at:1342: cat stderr -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: - | (#) + (#) = 2222 -./calc.at:1342: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' ./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29803,14 +29617,41 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1344: $PREPARSER ./calc input +input: +stderr: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 + | 1 = 2 = 3 +./calc.at:1346: $PREPARSER ./calc input +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1343: cat stderr +stderr: +stderr: +1.7: syntax error +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 + 2 * 3 + !* ++ + | (* *) + (*) + (*) ./calc.at:1343: $PREPARSER ./calc input stderr: -memory exhausted +1.7: syntax error +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: "$PERL" -pi -e 'use strict; +stderr: +stdout: +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29821,10 +29662,15 @@ }eg ' expout || exit 77 stderr: -memory exhausted -./calc.at:1342: cat stderr -input: -./calc.at:1343: "$PERL" -pi -e 'use strict; +./types.at:139: $PREPARSER ./test +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +stderr: +./calc.at:1348: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1344: cat stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29834,24 +29680,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1343: cat stderr - | (1 + #) = 1111 -./calc.at:1342: $PREPARSER ./calc input -stderr: +======== Testing with C++ standard flags: '' input: -syntax error: invalid character: '#' -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -./calc.at:1343: $PREPARSER ./calc input -stderr: -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +./calc.at:1344: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -syntax error: invalid character: '#' ./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29862,66 +29695,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: cat stderr stderr: -stdout: -./calc.at:1344: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./calc.at:1343: cat stderr -./calc.at:1344: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - +syntax error, unexpected number +error: 2222 != 1 input: input: - | (1 + #) = 1111 -./calc.at:1343: $PREPARSER ./calc input -stderr: -./calc.at:1342: cat stderr -syntax error: invalid character: '#' - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 + | 1 + 2 * 3 + !+ ++ | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1344: $PREPARSER ./calc input -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -input: -syntax error: invalid character: '#' -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -./calc.at:1342: $PREPARSER ./calc input + | +1 +./calc.at:1346: $PREPARSER ./calc input +./calc.at:1343: $PREPARSER ./calc input stderr: stderr: -syntax error: invalid character: '#' -./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: "$PERL" -pi -e 'use strict; +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29931,19 +29723,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS stderr: -syntax error: invalid character: '#' -./calc.at:1343: cat stderr +./calc.at:1344: cat stderr +./calc.at:1343: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: - | 1 2 -./calc.at:1344: $PREPARSER ./calc input input: + | (- *) + (1 2) = 1 stderr: - | (# + 1) = 1111 + | 1 + 2 * 3 + !- ++ +./calc.at:1343: $PREPARSER ./calc input +./calc.at:1344: $PREPARSER ./calc input +2.1: syntax error +stderr: +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' syntax error, unexpected number +error: 2222 != 1 ./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: $PREPARSER ./calc input -./calc.at:1342: "$PERL" -pi -e 'use strict; +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29954,14 +29755,12 @@ }eg ' expout || exit 77 stderr: -stderr: -syntax error: invalid character: '#' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
syntax error, unexpected number +error: 2222 != 1 +./calc.at:1346: cat stderr ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: cat stderr stderr: -syntax error: invalid character: '#' -input: ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29972,8 +29771,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + # + 1) = 1111 -./calc.at:1342: $PREPARSER ./calc input ./calc.at:1344: cat stderr ./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -29985,27 +29782,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1346: $PREPARSER ./calc /dev/null +stderr: +1.1: syntax error +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: input: -syntax error: invalid character: '#' -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 +1.1: syntax error + | (* *) + (*) + (*) ./calc.at:1344: $PREPARSER ./calc input ./calc.at:1343: cat stderr stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' ./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -input: - | (1 + # + 1) = 1111 -./calc.at:1343: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1344: "$PERL" -pi -e 'use strict; +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30016,8 +29808,18 @@ }eg ' expout || exit 77 stderr: -syntax error: invalid character: '#' -./calc.at:1342: "$PERL" -pi -e 'use strict; +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+input: + | 1 + 2 * 3 + !* ++ +./calc.at:1343: $PREPARSER ./calc input +stderr: +memory exhausted +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: cat stderr +stderr: +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30027,16 +29829,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1344: cat stderr +memory exhausted input: - | error -./calc.at:1344: $PREPARSER ./calc input stderr: -./calc.at:1342: cat stderr -syntax error, unexpected invalid token -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1346: $PREPARSER ./calc input +stdout: +./calc.at:1344: cat stderr stderr: -syntax error, unexpected invalid token +./types.at:139: ./check +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1344: $PREPARSER ./calc input ./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30047,54 +29858,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (1 + 1) / (1 - 1) -./calc.at:1342: $PREPARSER ./calc input stderr: -error: null divisor +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +stderr: +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1343: cat stderr -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1344: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -error: null divisor -input: -./calc.at:1344: cat stderr - | (1 + 1) / (1 - 1) +./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: + | (#) + (#) = 2222 ./calc.at:1343: $PREPARSER ./calc input +input: + | 1 + 2 * 3 + !- ++ stderr: -error: null divisor -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 = 2 = 3 +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1344: $PREPARSER ./calc input stderr: -syntax error, unexpected '=' -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -syntax error, unexpected '=' -error: null divisor -./calc.at:1342: cat stderr -./calc.at:1344: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30104,8 +29896,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -476. calc.at:1342: ok -./calc.at:1344: cat stderr +./types.at:139: $PREPARSER ./test +stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: cat stderr +======== Testing with C++ standard flags: '' ./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30116,19 +29912,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | - | +1 -./calc.at:1344: $PREPARSER ./calc input -stderr: -./calc.at:1343: cat stderr - -syntax error, unexpected '+' -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '+' -477. calc.at:1343: ok - ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30139,52 +29922,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | (!!) + (1 2) = 1 +./calc.at:1346: $PREPARSER ./calc input +./calc.at:1343: cat stderr +stderr: ./calc.at:1344: cat stderr -./calc.at:1344: $PREPARSER ./calc /dev/null +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -syntax error, unexpected end of input -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 stderr: -syntax error, unexpected end of input -480. calc.at:1347: testing Calculator api.push-pull=both api.pure=full %locations ... -./calc.at:1347: mv calc.y.tmp calc.y - -./calc.at:1347: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1344: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1344: cat stderr input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +1.11: syntax error +1.1-16: error: 2222 != 1 +stdout: + | 1 + 2 * 3 + !* ++ +./calc.at:1343: $PREPARSER ./calc input ./calc.at:1344: $PREPARSER ./calc input -481. calc.at:1348: testing Calculator parse.error=detailed %locations ... 
-./calc.at:1348: mv calc.y.tmp calc.y - -stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1348: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -stdout: -./calc.at:1346: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1344: "$PERL" -pi -e 'use strict; +memory exhausted +syntax error: invalid character: '#' +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30194,7 +29960,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1346: "$PERL" -ne ' +./calc.at:1347: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -30205,8 +29971,11 @@ || /\t/ )' calc.c -./calc.at:1344: cat stderr -input: +stderr: +stderr: +memory exhausted +syntax error: invalid character: '#' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -30221,50 +29990,15 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1346: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -./calc.at:1344: $PREPARSER ./calc input -stderr: -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected number -error: 2222 != 1 -stderr: -./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | 1 2 -./calc.at:1346: $PREPARSER ./calc input -./calc.at:1344: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1347: $PREPARSER ./calc input stderr: -1.3: syntax error -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1344: cat stderr +./calc.at:1346: cat stderr +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1347: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -1.3: syntax error input: | (- *) + (1 2) = 1 -./calc.at:1344: $PREPARSER ./calc input -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-syntax error, unexpected number -error: 2222 != 1 -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1346: $PREPARSER ./calc input +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30274,9 +30008,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 +./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30287,28 +30019,44 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1343: cat stderr +stdout: ./calc.at:1344: cat stderr input: - | (* *) + (*) + (*) -./calc.at:1344: $PREPARSER ./calc input +./types.at:139: $PREPARSER ./test +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 + | 1 2 +./calc.at:1347: $PREPARSER ./calc input stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1346: cat stderr stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
input: - | 1//2 -./calc.at:1346: $PREPARSER ./calc input +1.3: syntax error +input: +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1343: $PREPARSER ./calc input +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +syntax error: invalid character: '#' +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.3: syntax error -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1344: "$PERL" -pi -e 'use strict; + | (#) + (#) = 2222 +./calc.at:1344: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30318,15 +30066,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +======== Testing with C++ standard flags: '' stderr: -./calc.at:1344: cat stderr -1.3: syntax error -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1344: $PREPARSER ./calc input +syntax error: invalid character: '#' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1346: cat stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30336,16 +30084,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: - | 1 + 2 * 3 + !- ++ -./calc.at:1344: $PREPARSER ./calc input -stderr: -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1346: cat stderr -./calc.at:1344: "$PERL" -pi -e 'use strict; + | (* *) + (*) + (*) +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30355,23 +30096,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1344: cat stderr -input: - | error - | 1 + 2 * 3 + !* ++ -./calc.at:1344: $PREPARSER ./calc input ./calc.at:1346: $PREPARSER ./calc input stderr: -memory exhausted -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -memory exhausted -stderr: -1.1: syntax error +1.2: syntax error +1.10: syntax error +1.16: syntax error ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30382,6 +30111,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1347: cat stderr +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1343: cat stderr +stderr: +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1344: cat stderr +stdout: +input: +input: +./types.at:139: ./check + | 1//2 +./calc.at:1347: $PREPARSER ./calc input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +1.3: syntax error + | (1 + # + 1) = 1111 +./calc.at:1343: 
$PREPARSER ./calc input +stderr: +input: + | (1 + #) = 1111 +./calc.at:1344: $PREPARSER ./calc input ./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30392,27 +30145,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1344: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1344: $PREPARSER ./calc input +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.3: syntax error stderr: -./calc.at:1346: cat stderr syntax error: invalid character: '#' syntax error: invalid character: '#' ./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: cat stderr input: - | 1 = 2 = 3 + | 1 + 2 * 3 + !+ ++ ./calc.at:1346: $PREPARSER ./calc input stderr: -stderr: syntax error: invalid character: '#' -syntax error: invalid character: '#' -1.7: syntax error -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.7: syntax error -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30422,8 +30172,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1346: cat stderr -./calc.at:1344: "$PERL" -pi -e 'use strict; +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30433,34 +30183,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1344: cat stderr - | - | +1 -./calc.at:1346: $PREPARSER ./calc input stderr: +./calc.at:1343: cat stderr +./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1347: cat stderr input: - | (1 + #) = 1111 -./calc.at:1344: $PREPARSER ./calc input -2.1: syntax error -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -2.1: syntax error -syntax error: invalid character: '#' -./calc.at:1346: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | (1 + 1) / (1 - 1) +./calc.at:1343: $PREPARSER ./calc input +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1346: $PREPARSER ./calc input ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30471,17 +30203,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1346: cat stderr -./calc.at:1346: $PREPARSER ./calc /dev/null stderr: -1.1: syntax error +stderr: ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1344: cat stderr stderr: -1.1: syntax error input: | (# + 1) = 1111 ./calc.at:1344: $PREPARSER ./calc input +error: null divisor +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30492,21 +30224,44 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: syntax error: invalid character: '#' +stderr: +error: null divisor +input: + | error +./calc.at:1347: $PREPARSER ./calc input +stderr: ./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +1.1: syntax error ./calc.at:1346: cat stderr +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -stderr: -./types.at:139: $PREPARSER ./test -syntax error: invalid character: '#' input: stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +1.1: syntax error +./calc.at:1343: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: ./check + | 1 + 2 * 3 + !* ++ ./calc.at:1346: $PREPARSER ./calc input stderr: -./calc.at:1344: "$PERL" -pi -e 'use strict; +1.14: memory exhausted +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1343: cat stderr +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30516,23 +30271,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.14: memory exhausted +./calc.at:1344: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1344: cat stderr -======== Testing with C++ standard flags: '' -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -input: +477. calc.at:1343: ok ./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30543,39 +30295,34 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1347: cat stderr +input: +input: | (1 + # + 1) = 1111 ./calc.at:1344: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1347: $PREPARSER ./calc input ./calc.at:1346: cat stderr stderr: stderr: syntax error: invalid character: '#' -stdout: +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (!!) + (1 2) = 1 + +stderr: +stderr: + | (#) + (#) = 2222 ./calc.at:1346: $PREPARSER ./calc input -./calc.at:1347: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +syntax error: invalid character: '#' +1.7: syntax error stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1347: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -./calc.at:1344: "$PERL" -pi -e 'use strict; +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30585,6 +30332,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' ./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30595,62 +30344,56 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1347: cat stderr +483. calc.at:1351: testing Calculator parse.error=detailed %locations %header %name-prefix "calc" api.token.prefix={TOK_} %verbose %yacc ... +./calc.at:1351: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1346: cat stderr +./calc.at:1344: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 + | +1 ./calc.at:1347: $PREPARSER ./calc input -stderr: -stdout: -./types.at:139: ./check -stderr: +input: ./calc.at:1344: cat stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + | (1 + #) = 1111 +./calc.at:1346: $PREPARSER ./calc input +stderr: +2.1: syntax error ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: input: | (1 + 1) / (1 - 1) -./calc.at:1346: cat stderr -./calc.at:1344: $PREPARSER ./calc input -./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -stderr: -input: -error: null divisor - | (- *) + (1 2) = 1 -./calc.at:1346: $PREPARSER ./calc input -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -./calc.at:1347: $PREPARSER ./calc input -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -stderr: -1.3: syntax error -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +2.1: syntax error stderr: +./calc.at:1344: $PREPARSER ./calc input stderr: error: null divisor +1.6: syntax error: invalid character: '#' +./calc.at:1351: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -stdout: -./calc.at:1348: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +error: null divisor ./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30661,6 +30404,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +stdout: +./calc.at:1348: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' ./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30671,7 +30417,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1347: cat stderr ./calc.at:1348: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -30683,15 +30428,6 @@ || /\t/ )' calc.c -input: - | 1//2 -./calc.at:1347: $PREPARSER ./calc input -./calc.at:1346: cat stderr -stderr: -1.3: syntax error -input: -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30702,7 +30438,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.3: syntax error +./calc.at:1347: cat stderr +./calc.at:1346: cat stderr +input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -30717,83 +30455,42 @@ | 2^2^3 = 256 | (2^2)^3 = 64 ./calc.at:1348: $PREPARSER ./calc input +./calc.at:1347: $PREPARSER ./calc /dev/null input: stderr: -./calc.at:1347: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ 
/\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1344: cat stderr - | (* *) + (*) + (*) + | (# + 1) = 1111 ./calc.at:1346: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1344: cat stderr +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +stderr: stderr: -478. calc.at:1344: stderr: -./calc.at:1347: cat stderr - ok -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1348: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | error -./calc.at:1347: $PREPARSER ./calc input stderr: 1.1: syntax error +1.2: syntax error: invalid character: '#' +./calc.at:1348: $EGREP -c -v 'Return for a new token:|LAC:' stderr +478. calc.at:1344: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' stderr: -1.1: syntax error -./calc.at:1347: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error input: | 1 2 ./calc.at:1348: $PREPARSER ./calc input - +1.1: syntax error stderr: -./calc.at:1347: cat stderr 1.3: syntax error, unexpected number +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1346: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +1.2: syntax error: invalid character: '#' stderr: -input: - | 1 = 2 = 3 -./calc.at:1347: $PREPARSER ./calc input 1.3: syntax error, unexpected number -stderr: -1.7: syntax error -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.7: syntax error -./calc.at:1346: cat stderr ./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30804,8 +30501,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1347: cat stderr -./calc.at:1348: "$PERL" -pi -e 'use strict; +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30815,22 +30511,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 + 2 * 3 + !+ ++ -input: -./calc.at:1346: $PREPARSER ./calc input - | - | +1 -./calc.at:1347: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: cat stderr -stderr: -2.1: syntax error -./calc.at:1347: "$PERL" -pi -e 'use strict; +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30840,39 +30521,74 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: stderr: -./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr -482. calc.at:1350: testing Calculator parse.error=detailed %locations %header api.prefix={calc} %verbose %yacc ... -./calc.at:1350: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - +./calc.at:1347: cat stderr +484. calc.at:1353: testing Calculator %debug ... +./calc.at:1353: mv calc.y.tmp calc.y +./calc.at:1353: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stdout: +./calc.at:1346: cat stderr +./calc.at:1348: cat stderr +input: +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1350: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1347: $PREPARSER ./calc input +input: | 1//2 -./calc.at:1347: cat stderr + | (1 + # + 1) = 1111 ./calc.at:1348: $PREPARSER ./calc input -stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1347: $PREPARSER ./calc /dev/null -stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' - | 1 + 2 * 3 + !- ++ ./calc.at:1346: $PREPARSER ./calc input stderr: stderr: -1.1: syntax error -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1350: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +1.6: syntax error: invalid character: '#' +./calc.at:1350: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -./calc.at:1348: "$PERL" -pi -e 'use strict; +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +1.6: syntax error: invalid character: '#' +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30882,7 +30598,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1350: $PREPARSER ./calc input +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30892,9 +30609,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.1: syntax error -./calc.at:1346: cat stderr -./calc.at:1347: "$PERL" -pi -e 'use strict; +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30904,41 +30619,48 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1348: cat stderr -input: +stderr: +./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1346: cat stderr ./calc.at:1347: cat stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1346: $PREPARSER ./calc input input: | error ./calc.at:1348: $PREPARSER ./calc input +input: +input: stderr: -stderr: -1.14: memory exhausted -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: 1.1: syntax error, unexpected invalid token + | 1 2 +./calc.at:1350: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1346: $PREPARSER ./calc input + | (!!) 
+ (1 2) = 1 +stderr: ./calc.at:1347: $PREPARSER ./calc input stderr: +1.3: syntax error, unexpected number +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.14: memory exhausted +1.11-17: error: null divisor stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.1: syntax error, unexpected invalid token +1.11: syntax error +1.1-16: error: 2222 != 1 ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1347: "$PERL" -pi -e 'use strict; +1.3: syntax error, unexpected number +stderr: +1.11-17: error: null divisor +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30948,7 +30670,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30958,7 +30680,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1348: "$PERL" -pi -e 'use strict; +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30968,25 +30690,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1350: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1348: cat stderr -./calc.at:1346: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1346: $PREPARSER ./calc input -./calc.at:1347: cat stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | 1 = 2 = 3 -./calc.at:1348: $PREPARSER ./calc input -stderr: -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30996,22 +30700,40 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1348: cat stderr +stderr: +stdout: +./types.at:139: ./check +./calc.at:1350: cat stderr +input: + | 1 = 2 = 3 +./calc.at:1348: $PREPARSER ./calc input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: input: ./calc.at:1346: cat stderr 1.7: syntax error, unexpected '=' - | (!!) + (1 2) = 1 -./calc.at:1347: $PREPARSER ./calc input ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +./calc.at:1350: $PREPARSER ./calc input +./calc.at:1347: cat stderr stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 stderr: -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS 1.7: syntax error, unexpected '=' input: - | (1 + #) = 1111 -./calc.at:1346: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1347: $PREPARSER ./calc input +479. calc.at:1346: ok +stderr: stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31023,14 +30745,12 @@ }eg ' expout || exit 77 stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 ./calc.at:1348: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1347: "$PERL" -pi -e 'use strict; + +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31041,7 +30761,10 @@ }eg ' expout || exit 77 input: -./calc.at:1346: "$PERL" -pi -e 'use strict; + | + | +1 +./calc.at:1348: $PREPARSER ./calc input +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31051,27 +30774,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | - | +1 -./calc.at:1348: $PREPARSER ./calc input stderr: 2.1: syntax error, unexpected '+' ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1346: cat stderr stderr: -input: -./calc.at:1347: cat stderr 2.1: syntax error, unexpected '+' - | (# + 1) = 1111 -./calc.at:1346: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1347: $PREPARSER ./calc input -stderr: -stderr: +./calc.at:1347: cat stderr +./calc.at:1350: cat stderr ./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31082,13 +30791,29 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error: invalid character: '#' -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 +input: + | (* *) + (*) + (*) +./calc.at:1347: $PREPARSER ./calc input +input: + | error +./calc.at:1350: $PREPARSER ./calc input +stderr: +1.1: syntax error, unexpected invalid token +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.1: syntax error, unexpected invalid token +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +1.2: syntax error +1.10: syntax error +1.16: syntax error ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1348: cat stderr -./calc.at:1346: "$PERL" -pi -e 'use strict; +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31098,16 +30823,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 ./calc.at:1348: $PREPARSER ./calc /dev/null stderr: 1.1: syntax error, unexpected end 
of file ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1346: cat stderr -stderr: +./calc.at:1350: cat stderr ./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31118,15 +30838,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (1 + # + 1) = 1111 -./calc.at:1346: $PREPARSER ./calc input -1.1: syntax error, unexpected end of file -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +485. calc.at:1354: testing Calculator parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc ... +./calc.at:1354: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + stderr: +1.1: syntax error, unexpected end of file +./calc.at:1354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +input: + | 1 = 2 = 3 +./calc.at:1350: $PREPARSER ./calc input ./calc.at:1347: cat stderr +stderr: +1.7: syntax error, unexpected '=' +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31137,9 +30867,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.6: syntax error: invalid character: '#' +stderr: +1.7: syntax error, unexpected '=' ./calc.at:1348: cat stderr -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31150,28 +30881,36 @@ }eg ' expout || exit 77 input: +input: + | 1 + 2 * 3 + !+ ++ | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1348: $PREPARSER ./calc input +./calc.at:1347: $PREPARSER ./calc input +stderr: +./calc.at:1350: cat stderr stderr: -./calc.at:1346: cat stderr 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.1-46: error: 4444 != 1 ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
1.1-46: error: 4444 != 1 input: - | (1 + 1) / (1 - 1) - | (* *) + (*) + (*) -./calc.at:1346: $PREPARSER ./calc input -./calc.at:1347: $PREPARSER ./calc input +stderr: + | + | +1 +./calc.at:1350: $PREPARSER ./calc input +stderr: +2.1: syntax error, unexpected '+' +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31182,32 +30921,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1347: $PREPARSER ./calc input ./calc.at:1348: cat stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11-17: error: null divisor -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: | (!!) + (1 2) = 1 ./calc.at:1348: $PREPARSER ./calc input -1.11-17: error: null divisor stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error stderr: 1.11: syntax error, unexpected number 1.1-16: error: 2222 != 1 ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1347: "$PERL" -pi -e 'use strict; +2.1: syntax error, unexpected '+' +stderr: +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31217,7 +30948,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1346: "$PERL" -pi -e 'use strict; +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31227,9 +30960,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1347: cat stderr -./calc.at:1346: cat stderr -./calc.at:1348: "$PERL" -pi -e 'use strict; +./calc.at:1350: cat stderr +./calc.at:1348: cat stderr +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31239,35 +30972,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1354: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1350: $PREPARSER ./calc /dev/null input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1347: $PREPARSER ./calc input -stderr: -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -479. calc.at:1346: ./calc.at:1348: cat stderr - ok stderr: -./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: | (- *) + (1 2) = 1 ./calc.at:1348: $PREPARSER ./calc input +1.1: syntax error, unexpected end of file +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.12: syntax error, unexpected number 1.1-17: error: 2222 != 1 -input: ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - - | 1 + 2 * 3 + !- ++ -./calc.at:1347: $PREPARSER ./calc input stderr: -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: cat stderr stderr: stderr: +stdout: 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
1.12: syntax error, unexpected number 1.1-17: error: 2222 != 1 -./calc.at:1347: "$PERL" -pi -e 'use strict; +./types.at:139: $PREPARSER ./test +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1347: $PREPARSER ./calc input +1.1: syntax error, unexpected end of file +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.14: memory exhausted +======== Testing with C++ standard flags: '' +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31277,17 +31014,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1347: cat stderr -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1347: $PREPARSER ./calc input -stderr: -1.14: memory exhausted -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.14: memory exhausted -483. calc.at:1351: testing Calculator parse.error=detailed %locations %header %name-prefix "calc" api.token.prefix={TOK_} %verbose %yacc ... -./calc.at:1347: "$PERL" -pi -e 'use strict; +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31297,7 +31024,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1348: "$PERL" -pi -e 'use strict; +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +1.14: memory exhausted +./calc.at:1348: cat stderr +./calc.at:1350: cat stderr +input: +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31307,59 +31040,40 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1347: cat stderr -./calc.at:1351: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -./calc.at:1348: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1347: $PREPARSER ./calc input -stderr: -./calc.at:1351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: -input: -stdout: | (* *) + (*) + (*) -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' ./calc.at:1348: $PREPARSER ./calc input +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: -./types.at:139: $PREPARSER ./test -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +./calc.at:1350: $PREPARSER ./calc input 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.1-46: error: 4444 != 1 +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./calc.at:1350: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1347: cat stderr 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./calc.at:1350: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - +stderr: +input: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 + | (#) + (#) = 2222 +./calc.at:1347: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' ./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31370,8 +31084,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1350: cat stderr ./calc.at:1348: cat stderr +input: +stderr: +stdout: + | (!!) + (1 2) = 1 +./calc.at:1350: $PREPARSER ./calc input +./calc.at:1351: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +input: +stderr: + | 1 + 2 * 3 + !+ ++ ./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31382,8 +31119,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1348: $PREPARSER ./calc input +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c calc.h + +stderr: +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +stderr: +./calc.at:1348: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: -input: +./calc.at:1347: cat stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -31397,35 +31156,40 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 - | 1 + 2 * 3 + !+ ++ -./calc.at:1348: $PREPARSER ./calc input -./calc.at:1350: $PREPARSER ./calc input -stderr: -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1347: cat stderr -./calc.at:1351: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -stderr: -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1348: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1351: $PREPARSER ./calc input +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: +stderr: input: | (1 + #) = 1111 +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1347: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +stderr: +./calc.at:1348: $PREPARSER ./calc input stderr: 1.6: syntax error: invalid character: '#' +./calc.at:1350: cat stderr ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -./calc.at:1350: $PREPARSER ./calc input +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.6: syntax error: invalid character: '#' -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1348: $PREPARSER ./calc input +./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: +1.6: syntax error: invalid character: '#' stderr: +input: + | (- *) + (1 2) = 1 +input: +./calc.at:1350: $PREPARSER ./calc input ./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31436,14 +31200,46 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error, unexpected number -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 + | 1 2 +./calc.at:1351: $PREPARSER ./calc input stderr: +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: cat stderr 1.3: syntax error, unexpected number +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1347: cat stderr -./calc.at:1348: "$PERL" -pi -e 'use strict; +stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +input: +stdout: +stderr: + | 1 + 2 * 3 + !* ++ +./calc.at:1348: $PREPARSER ./calc input +1.3: syntax error, unexpected number +./types.at:139: ./check +input: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +1.14: memory exhausted + | (# + 1) = 1111 +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31453,8 +31249,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: $PREPARSER ./calc input stderr: -./calc.at:1350: "$PERL" -pi -e 'use strict; +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31464,35 +31262,53 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -./types.at:139: ./check +1.2: syntax error: invalid character: '#' +stderr: +1.14: memory exhausted +./calc.at:1351: cat stderr +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1350: cat stderr +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: input: -./calc.at:1348: cat stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -input: +1.2: syntax error: invalid character: '#' input: | 1//2 - | (# + 1) = 1111 +./calc.at:1351: $PREPARSER ./calc input +./calc.at:1348: cat stderr + | (* *) + (*) + (*) ./calc.at:1350: $PREPARSER ./calc input -./calc.at:1347: $PREPARSER ./calc input - | 1 + 2 * 3 + !* ++ -./calc.at:1348: $PREPARSER ./calc input -stderr: stderr: stderr: -1.2: syntax error: invalid character: '#' -1.14: memory exhausted -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' ./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -stderr: + | (#) + (#) = 2222 +./calc.at:1348: $PREPARSER ./calc input 1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -1.14: memory exhausted stderr: -./calc.at:1350: "$PERL" -pi -e 'use strict; +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31502,8 +31318,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error: invalid character: '#' -./calc.at:1348: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31513,10 +31329,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1350: cat stderr -./calc.at:1348: cat stderr -input: -./calc.at:1347: "$PERL" -pi -e 'use strict; +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31526,22 +31341,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (#) + (#) = 2222 -./calc.at:1348: $PREPARSER ./calc input -stderr: -input: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' ./calc.at:1347: cat stderr - | error -./calc.at:1350: $PREPARSER ./calc input -input: - | (1 + # + 1) = 1111 -./calc.at:1347: $PREPARSER ./calc input +./calc.at:1351: cat stderr +./calc.at:1350: cat stderr ./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31552,20 +31354,40 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +input: +input: + | (1 + # + 1) = 1111 +./calc.at:1348: cat stderr +./calc.at:1347: $PREPARSER ./calc input +input: + | error +./calc.at:1351: $PREPARSER ./calc input stderr: + | 1 + 2 * 3 + !+ ++ stderr: +./calc.at:1350: $PREPARSER ./calc input 1.6: syntax error: invalid character: '#' 1.1: syntax error, unexpected invalid token ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +stderr: +./calc.at:1348: $PREPARSER ./calc input ./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: cat stderr +stderr: stderr: 1.6: syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.1: syntax error, unexpected invalid token -input: -./calc.at:1347: "$PERL" -pi -e 'use strict; +stderr: +stderr: +./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.6: syntax error: invalid character: '#' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31575,9 +31397,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + #) = 1111 -./calc.at:1348: $PREPARSER ./calc input -./calc.at:1350: "$PERL" -pi -e 'use strict; +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31587,23 +31407,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.6: syntax error: invalid 
character: '#' -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1347: cat stderr -./calc.at:1350: cat stderr -1.6: syntax error: invalid character: '#' -input: - | (1 + 1) / (1 - 1) -./calc.at:1347: $PREPARSER ./calc input -stderr: -input: -1.11-17: error: null divisor -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 -./calc.at:1350: $PREPARSER ./calc input -stderr: ./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31614,16 +31417,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1350: $PREPARSER ./calc input +./calc.at:1351: cat stderr stderr: -1.11-17: error: null divisor ./calc.at:1348: cat stderr -1.7: syntax error, unexpected '=' ./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: cat stderr +stderr: +input: input: + | 1 = 2 = 3 +./calc.at:1351: $PREPARSER ./calc input | (# + 1) = 1111 ./calc.at:1348: $PREPARSER ./calc input -stderr: -./calc.at:1347: "$PERL" -pi -e 'use strict; +input: + | (1 + 1) / (1 - 1) +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31633,13 +31444,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error: invalid character: '#' stderr: +stderr: +./calc.at:1347: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.7: syntax error, unexpected '=' stderr: +1.11-17: error: null divisor +stderr: +1.7: syntax error, unexpected '=' +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: cat stderr +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.2: syntax error: invalid character: '#' -./calc.at:1350: "$PERL" -pi -e 'use strict; +stderr: +1.7: syntax error, unexpected '=' +stderr: +1.11-17: error: null divisor +input: +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31649,7 +31472,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1348: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !* ++ +./calc.at:1350: $PREPARSER ./calc input +stderr: +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31659,27 +31485,54 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1350: cat stderr +1.14: memory exhausted +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1348: cat stderr -./calc.at:1347: cat stderr +./calc.at:1351: cat stderr +stderr: +1.14: memory exhausted +./calc.at:1347: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: input: | | +1 -./calc.at:1350: $PREPARSER ./calc input -stderr: -2.1: syntax error, unexpected '+' -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -480. 
calc.at:1347: | (1 + # + 1) = 1111 +./calc.at:1351: $PREPARSER ./calc input + | (1 + # + 1) = 1111 ./calc.at:1348: $PREPARSER ./calc input - ok +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: stderr: 2.1: syntax error, unexpected '+' +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.6: syntax error: invalid character: '#' - ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1350: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1350: cat stderr +stderr: +2.1: syntax error, unexpected '+' +./calc.at:1347: cat stderr +1.6: syntax error: invalid character: '#' +input: + | (#) + (#) = 2222 +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31689,13 +31542,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.6: syntax error: invalid character: '#' -484. calc.at:1353: testing Calculator %debug ... -./calc.at:1353: mv calc.y.tmp calc.y - -./calc.at:1353: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1350: cat stderr +./calc.at:1350: $PREPARSER ./calc input +480. calc.at:1347: stderr: ./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31706,20 +31554,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1350: $PREPARSER ./calc /dev/null -./calc.at:1353: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -stderr: -1.1: syntax error, unexpected end of file + ok +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1351: cat stderr ./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1348: cat stderr stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1351: $PREPARSER ./calc /dev/null +stderr: 1.1: syntax error, unexpected end of file +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: + | (1 + 1) / (1 - 1) -./calc.at:1348: $PREPARSER ./calc input -stderr: -1.11-17: error: null divisor -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31730,35 +31580,64 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1348: $PREPARSER ./calc input stderr: stderr: -stdout: -./calc.at:1351: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +1.1: syntax error, unexpected end of file +1.11-17: error: null divisor +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1350: cat stderr -./calc.at:1351: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - +stderr: 1.11-17: error: null divisor +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1351: cat stderr +./calc.at:1348: cat stderr input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1351: $PREPARSER ./calc input +input: +481. calc.at:1348: ok + | (1 + #) = 1111 ./calc.at:1350: $PREPARSER ./calc input stderr: +stderr: 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.1-46: error: 4444 != 1 +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' ./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1348: "$PERL" -pi -e 'use strict; +stderr: +1.6: syntax error: invalid character: '#' +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 + +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31768,31 +31647,56 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 +486. calc.at:1355: testing Calculator parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc ... +./calc.at:1355: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1351: cat stderr +./calc.at:1355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 +./calc.at:1350: cat stderr + | (!!) 
+ (1 2) = 1 ./calc.at:1351: $PREPARSER ./calc input +input: stderr: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 + | (# + 1) = 1111 +./calc.at:1350: $PREPARSER ./calc input ./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: cat stderr stderr: -./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +1.2: syntax error: invalid character: '#' +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31803,14 +31707,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -481. calc.at:1348: ok - -input: -./calc.at:1350: cat stderr - | 1 2 -./calc.at:1351: $PREPARSER ./calc input -485. calc.at:1354: testing Calculator parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc ... -./calc.at:1354: +./calc.at:1351: cat stderr +487. calc.at:1357: testing Calculator api.pure=full parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc ... +./calc.at:1357: if "$POSIXLY_CORRECT_IS_EXPORTED"; then sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y else @@ -31818,23 +31717,43 @@ fi -./calc.at:1354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: -1.3: syntax error, unexpected number -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: cat stderr input: + | (- *) + (1 2) = 1 +./calc.at:1351: $PREPARSER ./calc input +input: +./calc.at:1357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -1.3: syntax error, unexpected number - | (!!) + (1 2) = 1 + | (1 + # + 1) = 1111 +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 ./calc.at:1350: $PREPARSER ./calc input +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1354: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 +1.6: syntax error: invalid character: '#' ./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 stdout: -./calc.at:1353: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +stderr: +1.6: syntax error: invalid character: '#' +./types.at:139: $PREPARSER ./test +stderr: +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31845,6 +31764,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +438. types.at:139: ok +./calc.at:1350: cat stderr +./calc.at:1351: cat stderr +input: +input: + | (1 + 1) / (1 - 1) +./calc.at:1350: $PREPARSER ./calc input + | (* *) + (*) + (*) +./calc.at:1351: $PREPARSER ./calc input +stderr: +stderr: +1.11-17: error: null divisor +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' + +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.11-17: error: null divisor +stderr: +stdout: +./calc.at:1353: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1355: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS ./calc.at:1353: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -31856,13 +31800,6 @@ || /\t/ )' calc.c -./calc.at:1351: cat stderr -stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -input: - | 1//2 -./calc.at:1351: $PREPARSER ./calc input ./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31873,25 +31810,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' ./calc.at:1350: cat stderr -stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -input: - | (- *) + (1 2) = 1 -./calc.at:1350: $PREPARSER ./calc input -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -31907,17 +31829,19 @@ | 2^2^3 = 256 | (2^2)^3 = 64 ./calc.at:1353: $PREPARSER ./calc input -stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +482. calc.at:1350: ./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + ok ./calc.at:1351: cat stderr stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -stderr: Starting parse Entering state 0 Stack now 0 @@ -32936,8 +32860,12 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: input: + +stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1351: $PREPARSER ./calc input +./calc.at:1357: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS Starting parse Entering state 0 Stack now 0 @@ -33956,39 +33884,25 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1353: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | error -./calc.at:1351: $PREPARSER ./calc input stderr: -1.1: syntax error, unexpected invalid token ./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error, unexpected invalid token -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +488. calc.at:1358: testing Calculator api.push-pull=both api.pure=full parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc ... +./calc.at:1358: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + input: -./calc.at:1350: cat stderr +./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr | 1 2 ./calc.at:1353: $PREPARSER ./calc input -./calc.at:1351: cat stderr stderr: +./calc.at:1358: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +input: Starting parse Entering state 0 Stack now 0 @@ -34009,17 +33923,12 @@ Stack now 0 Cleanup: discarding lookahead token "number" (1.1: 2) Stack now 0 -input: ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 = 2 = 3 + | 1 + 2 * 3 + !- ++ ./calc.at:1351: $PREPARSER ./calc input stderr: - | (* *) + (*) + (*) -./calc.at:1350: $PREPARSER ./calc input -1.7: syntax error, unexpected '=' -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -34041,16 +33950,6 @@ Cleanup: discarding lookahead token "number" (1.1: 2) Stack now 0 stderr: -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.7: syntax error, unexpected '=' -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' ./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -34061,16 +33960,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +489. calc.at:1360: testing Calculator api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1360: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1351: cat stderr ./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -34081,82 +33980,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1351: cat stderr +./calc.at:1360: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: -./calc.at:1350: cat stderr - | - | +1 + | 1 + 2 * 3 + !* ++ ./calc.at:1351: $PREPARSER ./calc input -stderr: -2.1: syntax error, unexpected '+' -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1350: $PREPARSER ./calc input -stderr: -stderr: -2.1: syntax error, unexpected '+' -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1353: cat stderr -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1350: $PREPARSER ./calc input -./calc.at:1351: cat stderr -stderr: -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: $PREPARSER ./calc /dev/null stderr: -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1350: cat stderr +1.14: memory exhausted +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 + 2 * 3 + !* ++ -stderr: -./calc.at:1350: $PREPARSER ./calc input stderr: -1.1: syntax error, unexpected end of file -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.14: memory exhausted -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: | 1//2 -1.14: memory exhausted ./calc.at:1353: $PREPARSER ./calc input stderr: -stderr: -1.1: syntax error, unexpected end of file -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1350: cat stderr Starting parse Entering state 0 Stack now 0 @@ -34185,8 +34022,6 @@ Cleanup: discarding lookahead token '/' (1.1: ) Stack now 0 ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: ./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -34197,8 +34032,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (#) + (#) = 2222 -./calc.at:1350: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -34227,25 +34060,9 @@ Stack now 0 Cleanup: discarding lookahead token '/' (1.1: ) Stack now 0 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' ./calc.at:1351: cat stderr -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1350: cat stderr input: + | (#) + (#) = 2222 ./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -34256,24 +34073,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + #) = 1111 -input: -./calc.at:1350: $PREPARSER ./calc input +./calc.at:1351: $PREPARSER ./calc input stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1351: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' ./calc.at:1353: cat stderr -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1350: "$PERL" -pi -e 'use strict; +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -34283,26 +34092,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1350: cat stderr input: -input: - | (# + 1) = 1111 -./calc.at:1350: $PREPARSER ./calc input | error -stderr: ./calc.at:1353: $PREPARSER ./calc input -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.1-46: error: 4444 != 1 -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1351: cat stderr stderr: -1.2: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -34311,8 +34105,15 @@ syntax error Cleanup: discarding lookahead token "invalid token" (1.1: ) Stack now 0 +input: ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./calc.at:1351: $PREPARSER ./calc input stderr: +1.6: syntax error: invalid character: '#' +stderr: +./calc.at:1358: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -34321,36 +34122,9 @@ syntax error Cleanup: discarding lookahead token "invalid token" (1.1: ) Stack now 0 -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1350: cat stderr -./calc.at:1351: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1350: $PREPARSER ./calc input stderr: 1.6: syntax error: invalid character: '#' -input: -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1353: "$PERL" -pi -e 'use strict; +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -34360,15 +34134,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (!!) + (1 2) = 1 -./calc.at:1351: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1350: "$PERL" -pi -e 'use strict; +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -34378,32 +34144,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1350: cat stderr -./calc.at:1353: cat stderr +./calc.at:1351: cat stderr input: -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -./calc.at:1350: $PREPARSER ./calc input +./calc.at:1353: cat stderr + | (# + 1) = 1111 +./calc.at:1351: $PREPARSER ./calc input stderr: +./calc.at:1360: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS input: -1.11-17: error: null divisor -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 = 2 = 3 ./calc.at:1353: $PREPARSER ./calc input stderr: -1.11-17: error: null divisor +1.2: syntax error: invalid character: '#' stderr: Starting parse Entering state 0 @@ -34445,17 +34199,6 @@ Cleanup: discarding lookahead token '=' (1.1: ) Stack now 0 ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1351: cat stderr stderr: Starting parse Entering state 0 @@ -34496,17 +34239,7 @@ Stack now 0 Cleanup: discarding lookahead token '=' (1.1: ) Stack now 0 -input: - | (- *) + (1 2) = 1 -./calc.at:1351: $PREPARSER ./calc input -stderr: -./calc.at:1350: cat stderr -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1353: "$PERL" -pi -e 'use strict; +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -34516,11 +34249,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -482. calc.at:1350: ok -./calc.at:1351: "$PERL" -pi -e 'use strict; +./calc.at:1351: cat stderr +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -34530,21 +34260,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1351: cat stderr -./calc.at:1353: cat stderr input: - - | (* *) + (*) + (*) + | (1 + # + 1) = 1111 ./calc.at:1351: $PREPARSER ./calc input stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.6: syntax error: invalid character: '#' ./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: cat stderr stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.6: syntax error: invalid character: '#' input: | | +1 @@ -34586,11 +34310,7 @@ Cleanup: discarding lookahead token '+' (1.1: ) Stack now 0 ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: cat stderr stderr: -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1351: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -34616,23 +34336,11 @@ Stack now 0 Cleanup: discarding lookahead token '+' (1.1: ) Stack now 0 +./calc.at:1351: cat stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1351: $PREPARSER ./calc input stderr: -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stdout: -./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./types.at:139: $PREPARSER ./test -486. calc.at:1355: testing Calculator parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc ... -./calc.at:1355: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -./calc.at:1355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y ./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -34643,18 +34351,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1351: $PREPARSER ./calc input -449. types.at:139: ok -stderr: -./calc.at:1353: cat stderr +1.11-17: error: null divisor ./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: cat stderr stderr: +1.11-17: error: null divisor ./calc.at:1353: $PREPARSER ./calc /dev/null -stderr: ./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -34665,6 +34367,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: Starting parse Entering state 0 Stack now 0 @@ -34674,8 +34377,8 @@ Cleanup: discarding lookahead token "end of input" (1.1: ) Stack now 0 ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: cat stderr stderr: +./calc.at:1351: cat stderr Starting parse Entering state 0 Stack now 0 @@ -34684,25 +34387,7 @@ syntax error Cleanup: discarding lookahead token "end of input" (1.1: ) Stack now 0 - -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1351: $PREPARSER ./calc input -stderr: -1.14: memory exhausted -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.14: memory exhausted -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +483. 
calc.at:1351: ok ./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -34713,44 +34398,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1351: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1351: $PREPARSER ./calc input ./calc.at:1353: cat stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -487. calc.at:1357: testing Calculator api.pure=full parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc ... -./calc.at:1355: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1357: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1353: $PREPARSER ./calc input -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1351: cat stderr stderr: -./calc.at:1357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y Starting parse Entering state 0 Stack now 0 @@ -35387,12 +35040,6 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -input: - | (1 + #) = 1111 -./calc.at:1351: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -35403,20 +35050,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +490. calc.at:1362: testing Calculator %no-lines api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1362: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + ./calc.at:1353: cat stderr -1.6: syntax error: invalid character: '#' +./calc.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 | (!!) 
+ (1 2) = 1 ./calc.at:1353: $PREPARSER ./calc input stderr: @@ -35561,7 +35206,6 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: cat stderr stderr: Starting parse Entering state 0 @@ -35703,10 +35347,6 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -input: - | (# + 1) = 1111 -./calc.at:1351: $PREPARSER ./calc input -stderr: ./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -35717,27 +35357,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error: invalid character: '#' -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1353: cat stderr -stderr: -1.2: syntax error: invalid character: '#' input: -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 | (- *) + (1 2) = 1 ./calc.at:1353: $PREPARSER ./calc input -./calc.at:1357: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS stderr: -./calc.at:1351: cat stderr Starting parse Entering state 0 Stack now 0 @@ -35888,7 +35512,6 @@ Cleanup: popping nterm input (1.1: ) ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: Starting parse Entering state 0 Stack now 0 @@ -36037,10 +35660,6 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) - | (1 + # + 1) = 1111 -./calc.at:1351: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' ./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -36051,25 +35670,41 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stdout: +./calc.at:1354: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' ./calc.at:1353: cat stderr -1.6: syntax error: invalid character: '#' +./calc.at:1354: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + input: | (* *) + (*) + (*) -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1353: $PREPARSER ./calc input -./calc.at:1351: cat stderr +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1354: $PREPARSER ./calc input stderr: +./calc.at:1362: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS Starting parse Entering state 0 Stack now 0 @@ -36374,410 +36009,6 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -input: - | (1 + 1) / (1 - 1) -./calc.at:1351: $PREPARSER ./calc input -stderr: -1.11-17: error: null divisor -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stdout: -stderr: -1.11-17: error: null divisor -./calc.at:1354: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1354: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1353: cat stderr -./calc.at:1351: cat stderr -483. 
calc.at:1351: ok -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1353: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 122): - $1 = token '!' 
(1.1: ) - $2 = token '+' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) - -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 122): - $1 = token '!' 
(1.1: ) - $2 = token '+' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1353: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1353: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 123): - $1 = token '!' 
(1.1: ) - $2 = token '-' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 123): - $1 = token '!' (1.1: ) - $2 = token '-' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1354: $PREPARSER ./calc input -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: Starting parse Entering state 0 @@ -37797,16 +37028,16 @@ Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -488. calc.at:1358: testing Calculator api.push-pull=both api.pure=full parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc ... -./calc.at:1358: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -./calc.at:1353: cat stderr +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -38826,14 +38057,36 @@ Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) ./calc.at:1354: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1353: cat stderr input: -./calc.at:1358: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y - | 1 + 2 * 3 + !* ++ -./calc.at:1353: $PREPARSER ./calc input -input: -stderr: | 1 2 ./calc.at:1354: $PREPARSER ./calc input +input: + | 1 + 2 * 3 + !+ ++ +stderr: +./calc.at:1353: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -38904,14 +38157,13 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 124): +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 122): $1 = token '!' (1.1: ) - $2 = token '*' (1.1: ) -memory exhausted + $2 = token '+' (1.1: ) Stack now 0 8 21 Cleanup: popping token '+' (1.1: ) Cleanup: popping nterm exp (1.1: 7) @@ -38937,7 +38189,6 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -39009,49 +38260,16 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 124): +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 122): $1 = token '!' (1.1: ) - $2 = token '*' (1.1: ) -memory exhausted + $2 = token '+' (1.1: ) Stack now 0 8 21 Cleanup: popping token '+' (1.1: ) Cleanup: popping nterm exp (1.1: 7) -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1353: cat stderr ./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -39062,255 +38280,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: +./calc.at:1353: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1354: cat stderr - | (#) + (#) = 2222 -./calc.at:1353: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) input: | 1//2 ./calc.at:1354: $PREPARSER ./calc input @@ -39343,16 +38314,10 @@ Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1353: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 @@ -39381,244 +38346,169 @@ Stack now 0 Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1354: cat stderr -input: -./calc.at:1358: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1353: cat stderr - | error -./calc.at:1354: $PREPARSER ./calc input -stderr: -input: - | (1 + #) = 1111 -./calc.at:1353: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 +Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.1: ) Shifting token '+' (1.1: ) Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 +Next 
token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 123): + $1 = token '!' 
(1.1: ) + $2 = token '-' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 +Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.1: ) Shifting token '+' (1.1: ) Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 
+Stack now 0 8 21 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1353: "$PERL" -pi -e 'use strict; +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 123): + $1 = token '!' (1.1: ) + $2 = token '-' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -39628,7 +38518,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1354: "$PERL" -pi -e 'use strict; +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -39638,205 +38528,222 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1353: cat stderr +./calc.at:1354: cat stderr input: - | (# + 1) = 1111 -./calc.at:1353: $PREPARSER ./calc input +./calc.at:1353: cat stderr + | error +stderr: +./calc.at:1354: $PREPARSER ./calc input +stdout: stderr: +./types.at:139: $PREPARSER ./test Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +stderr: +input: +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ +stderr: +./calc.at:1353: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +======== Testing with C++ standard flags: '' +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +Starting parse +Entering state 0 +Stack now 0 Reading a token Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token 
')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 124): + $1 = token '!' (1.1: ) + $2 = token '*' (1.1: ) +memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stdout: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = 
nterm input (1.1: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 124): + $1 = token '!' (1.1: ) + $2 = token '*' (1.1: ) +memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./types.at:139: $PREPARSER ./test ./calc.at:1354: cat stderr +stderr: input: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 = 2 = 3 ./calc.at:1354: $PREPARSER ./calc input ./calc.at:1353: "$PERL" -pi -e 'use strict; @@ -39850,6 +38757,7 @@ }eg ' expout || exit 77 stderr: +======== Testing with C++ standard flags: '' Starting parse Entering state 0 Stack now 0 @@ -39890,6 +38798,7 @@ Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: Starting parse Entering state 0 @@ -39931,7 +38840,6 @@ Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 ./calc.at:1353: cat stderr -input: ./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -39942,10 +38850,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + # + 1) = 1111 +input: + | (#) + (#) = 2222 ./calc.at:1353: $PREPARSER ./calc input -stderr: ./calc.at:1354: cat stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -39955,26 +38864,7 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 @@ -39986,55 +38876,81 @@ Entering state 11 Stack now 0 4 11 Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +syntax error: invalid character: '#' Shifting token error (1.1: ) Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) +Stack now 0 8 21 4 11 +Next token 
is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) Error: popping token error (1.1: ) -Stack now 0 4 +Stack now 0 8 21 4 Shifting token error (1.1: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 Reading a token Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) Entering state 26 -Stack now 0 4 11 26 +Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) $2 = token error (1.1: ) $3 = token ')' (1.1: ) -> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Reading a token Next token is token '=' (1.1: ) Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.1: ) Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) + $1 = nterm exp (1.1: 2222) $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 Next token is token '\n' (1.1: ) @@ -40042,7 +38958,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) + $1 = nterm exp (1.1: 2222) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -40061,6 +38977,7 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: Starting parse Entering state 0 @@ -40071,26 +38988,7 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 @@ -40102,55 +39000,81 @@ Entering state 11 Stack now 0 4 11 Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +syntax error: invalid character: '#' Shifting token error (1.1: ) Entering state 11 
-Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) Error: popping token error (1.1: ) -Stack now 0 4 +Stack now 0 8 21 4 Shifting token error (1.1: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 Reading a token Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) Entering state 26 -Stack now 0 4 11 26 +Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) $2 = token error (1.1: ) $3 = token ')' (1.1: ) -> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Reading a token Next token is token '=' (1.1: ) Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.1: ) Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) + $1 = nterm exp (1.1: 2222) $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 Next token is token '\n' (1.1: ) @@ -40158,7 +39082,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) + $1 = nterm exp (1.1: 2222) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -40176,7 +39100,6 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -input: | | +1 ./calc.at:1354: $PREPARSER ./calc input @@ -40208,16 +39131,6 @@ Stack now 0 ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -40243,11 +39156,46 @@ Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1353: cat stderr +./calc.at:1354: cat stderr +./calc.at:1354: $PREPARSER ./calc /dev/null +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (1 + 1) / (1 - 1) + | (1 + #) = 1111 ./calc.at:1353: $PREPARSER ./calc input stderr: +stdout: +stderr: +./types.at:139: ./check Starting parse Entering state 0 Stack now 0 @@ -40272,98 +39220,55 @@ Entering state 21 Stack now 0 4 12 21 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 118): +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) + $2 = token error (1.1: ) $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by 
rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 -Stack now 0 8 23 32 + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.1: ) -Reducing stack by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Next token is token '\n' (1.1: ) @@ -40371,7 +39276,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2) + $1 = nterm exp (1.1: 1111) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -40390,16 +39295,8 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stderr: Starting parse Entering state 0 @@ -40425,98 +39322,55 @@ Entering state 21 Stack now 0 4 12 21 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 118): +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) + $2 = token error (1.1: ) $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.1: ) 
-Shifting token '-' (1.1: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 -Stack now 0 8 23 32 + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.1: ) -Reducing stack by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Next token is token '\n' (1.1: ) @@ -40524,7 +39378,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2) + $1 = nterm exp (1.1: 1111) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -40542,19 +39396,6 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./calc.at:1354: cat stderr -./calc.at:1354: $PREPARSER ./calc /dev/null -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Stack now 0 @@ -40573,11 +39414,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1354: cat stderr +stderr: +stdout: +./types.at:139: ./check input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1354: $PREPARSER ./calc input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1353: cat stderr stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -40888,17 +39745,7 @@ Entering state 6 Stack now 0 6 Reading a token -Now at end of inp./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -ut. +Now at end of input. Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 @@ -40906,6 +39753,9 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1353: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 @@ -41224,6 +40074,198 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) ./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -41234,10 +40276,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1353: cat stderr ./calc.at:1354: cat stderr -484. calc.at:1353: ok input: +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 | (!!) 
+ (1 2) = 1 ./calc.at:1354: $PREPARSER ./calc input stderr: @@ -41381,8 +40431,12 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - +./calc.at:1353: cat stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1353: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -41524,6 +40578,123 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -41534,20 +40705,135 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) ./calc.at:1354: cat stderr -489. 
calc.at:1360: testing Calculator api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1360: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - input: | (- *) + (1 2) = 1 +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1354: $PREPARSER ./calc input -./calc.at:1360: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: Starting parse Entering state 0 @@ -41698,6 +40984,7 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: cat stderr stderr: Starting parse Entering state 0 @@ -41847,6 +41134,155 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +input: + | (1 + 1) / (1 - 1) +./calc.at:1353: $PREPARSER ./calc input +stderr: +stdout: +./calc.at:1355: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 
0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -41857,11 +41293,188 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = 
nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) ./calc.at:1354: cat stderr +./calc.at:1355: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + input: | (* *) + (*) + (*) ./calc.at:1354: $PREPARSER ./calc input +input: +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 Starting parse Entering state 0 Stack now 0 @@ -42014,6 +41627,7 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -42166,6 +41780,10 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1353: cat stderr +stderr: +stdout: +./calc.at:1357: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' ./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -42176,14 +41794,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1354: cat stderr +484. 
calc.at:1353: ok stderr: -stdout: -./calc.at:1355: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1354: $PREPARSER ./calc input -./calc.at:1355: "$PERL" -ne ' +./calc.at:1357: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -42194,352 +41807,7 @@ || /\t/ )' calc.c calc.h -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 142): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1355: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 142): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1354: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1354: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 143): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 143): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: +./calc.at:1354: cat stderr Starting parse Entering state 0 Stack now 0 @@ -43558,18 +42826,8 @@ Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) ./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -43889,8 +43147,7 @@ Shifting token '\n' (4.10-5.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by ./calc.at:1354: cat stderr -rule 4 (line 97): +Reducing stack by rule 4 (line 97): $1 = nterm exp (4.1-9: -1) $2 = token '\n' (4.10-5.0: ) -> $$ = nterm line (4.1-5.0: ) @@ -44589,9 +43846,24 @@ Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) ./calc.at:1355: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | 1 + 2 * 3 + !* ++ + | 1 + 2 * 3 + !+ ++ ./calc.at:1354: $PREPARSER ./calc input +input: + + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1357: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -44663,22 +43935,21 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 144): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 142): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '+' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: | 1 2 -./calc.at:1355: $PREPARSER ./calc input stderr: +./calc.at:1355: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -44749,18 +44020,20 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 144): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 142): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '+' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1354: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: +stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -44781,8 +44054,8 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -44797,111 +44070,50 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1354: cat stderr -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1355: cat stderr - | (#) + (#) = 2222 -./calc.at:1354: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 Next token is token '=' (1.11: ) @@ 
-44909,31 +44121,31 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.14-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) + $1 = nterm exp (1.1-9: 7) $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -44943,457 +44155,2136 @@ Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1//2 -./calc.at:1355: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 
-Stack now 0 8 21 4 11 +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 19 2 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> 
$$ = nterm input (1.1-2.0: ) + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 -Stack now 0 1 +Stack now 0 6 2 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) Entering state 1 -Stack now 0 1 +Stack now 0 6 2 10 24 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1354: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1354: $PREPARSER ./calc input -./calc.at:1355: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 -Stack now 0 4 +Stack now 0 6 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 6 4 2 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 -Stack now 0 4 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 Reading a token -Next token is 
token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (5.11-6.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) Entering state 1 -Stack 
now 0 8 19 1 +Stack now 0 6 8 19 2 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | error -./calc.at:1355: $PREPARSER ./calc input -stderr: -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -input: - | (# + 1) = 1111 -stderr: +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is 
token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 20 4 +Reading a token +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) +Entering state 1 +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 20 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 +Reading a token +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) 
+Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 +Reading a token +Next token is token number (13.4: 2) +Shifting 
token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (14.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +stdout: + | 1 + 2 * 3 + !- ++ +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1354: $PREPARSER ./calc input +./types.at:139: $PREPARSER ./test +stderr: +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Stack now 0 6 8 21 +Reading a token +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 
+Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 +Reading a token +Next token is token number (4.4: 
2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> 
$$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (7.1-4: -1) + 
$2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm 
exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 20 4 +Reading a token +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) +Entering state 1 +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 20 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 +Reading a token +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm 
line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.4: 2) +-> $$ = nterm exp 
(13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (14.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 143): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1357: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 143): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +======== Testing with C++ standard flags: '' +input: ./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -45404,20 +46295,422 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -./calc.at:1357: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' + | 1 2 +./calc.at:1357: $PREPARSER ./calc input +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 ./calc.at:1355: cat stderr -./calc.at:1357: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +input: +491. calc.at:1363: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1363: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + | 1//2 +./calc.at:1355: $PREPARSER ./calc input +input: +stderr: + | 1 + 2 * 3 + !* ++ +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 144): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1357: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 144): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1//2 +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: $PREPARSER ./calc input +./calc.at:1355: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: cat stderr +input: + | error +stderr: +./calc.at:1355: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (#) + (#) = 2222 +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: stderr: Starting parse Entering state 0 @@ -45440,64 +46733,90 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.7: ) + $2 = token 
error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -45514,8 +46833,28 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1357: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: Starting parse Entering state 0 Stack now 0 @@ -45537,17 +46876,266 @@ Entering state 11 Stack now 0 4 11 Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | error +./calc.at:1357: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1354: cat stderr +stderr: +stderr: +stdout: +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1355: cat stderr +./calc.at:1358: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +stdout: +./types.at:139: ./check +./calc.at:1358: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +input: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1357: cat stderr +input: + | 1 = 2 = 3 +./calc.at:1355: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1354: $PREPARSER ./calc input +stderr: +input: +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Reading a token 
-Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 @@ -45611,24 +47199,10 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: | 1 = 2 = 3 -./calc.at:1355: $PREPARSER ./calc input -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 ./calc.at:1357: $PREPARSER ./calc input +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -45669,22 +47243,10 @@ Stack now 0 Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1354: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1354: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1363: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS Starting parse Entering state 0 Stack now 0 @@ -45724,7 +47286,6 @@ Stack now 0 Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -stderr: Starting parse Entering state 0 Stack now 0 @@ -45765,64 +47326,48 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 
1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -45839,6 +47384,83 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +stdout: +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: $PREPARSER ./test +./calc.at:1354: cat stderr +stderr: +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1355: cat stderr stderr: Starting parse Entering state 0 @@ -45853,11 +47475,13 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -45868,11 +47492,13 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 +Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -45883,6 +47509,7 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token Next token is token '=' (1.11: ) Reducing stack by rule 9 (line 114): @@ -45904,6 +47531,7 @@ Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.13: 7) Shifting token number (1.13: 7) @@ -45914,6 +47542,7 @@ -> $$ = nterm exp (1.13: 7) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.14-2.0: ) Reducing stack by rule 6 (line 102): @@ -45938,6 +47567,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (2.1: 1) Shifting token number (2.1: 1) @@ -45948,11 +47578,13 @@ -> $$ = nterm exp (2.1: 1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '+' (2.3: ) Shifting token '+' (2.3: ) Entering state 21 Stack now 0 6 8 21 +Return for a new token: Reading a token Next token is token number (2.5: 2) Shifting token number (2.5: 2) @@ -45963,16 +47595,19 @@ -> $$ = nterm exp (2.5: 2) Entering state 30 Stack now 0 6 8 21 30 +Return for a new token: Reading a token Next token is token '*' (2.7: ) Shifting token '*' (2.7: ) Entering state 22 Stack now 0 6 8 21 30 22 +Return for a new token: Reading a token Next token is token '-' (2.9: ) Shifting token '-' (2.9: ) Entering state 2 Stack now 0 6 8 21 30 22 2 +Return for a new token: Reading a token Next token is token number (2.10: 3) Shifting token number (2.10: 3) @@ -45983,6 +47618,7 @@ -> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 21 30 22 2 10 +Return for a new token: Reading a token Next token is token '=' (2.12: ) Reducing stack by rule 11 (line 124): @@ -46011,11 +47647,13 @@ Shifting token '=' (2.12: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (2.14: ) Shifting token '-' (2.14: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (2.15: 5) Shifting token number (2.15: 5) @@ -46026,6 +47664,7 @@ -> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (2.16-3.0: ) Reducing stack by rule 11 (line 124): @@ -46058,6 +47697,7 @@ -> $$ = nterm input (1.1-3.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (3.1-4.0: ) Shifting token '\n' (3.1-4.0: ) @@ -46074,11 +47714,13 @@ -> $$ = nterm input (1.1-4.0: ) Entering state 6 Stack now 0 
6 +Return for a new token: Reading a token Next token is token '-' (4.1: ) Shifting token '-' (4.1: ) Entering state 2 Stack now 0 6 2 +Return for a new token: Reading a token Next token is token number (4.2: 1) Shifting token number (4.2: 1) @@ -46089,11 +47731,13 @@ -> $$ = nterm exp (4.2: 1) Entering state 10 Stack now 0 6 2 10 +Return for a new token: Reading a token Next token is token '^' (4.3: ) Shifting token '^' (4.3: ) Entering state 24 Stack now 0 6 2 10 24 +Return for a new token: Reading a token Next token is token number (4.4: 2) Shifting token number (4.4: 2) @@ -46104,6 +47748,7 @@ -> $$ = nterm exp (4.4: 2) Entering state 33 Stack now 0 6 2 10 24 33 +Return for a new token: Reading a token Next token is token '=' (4.6: ) Reducing stack by rule 12 (line 125): @@ -46124,11 +47769,13 @@ Shifting token '=' (4.6: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (4.8: ) Shifting token '-' (4.8: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (4.9: 1) Shifting token number (4.9: 1) @@ -46139,6 +47786,7 @@ -> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (4.10-5.0: ) Reducing stack by rule 11 (line 124): @@ -46159,8 +47807,7 @@ Shifting token '\n' (4.10-5.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -rule 4 (line 97): +Reducing stack by rule 4 (line 97): $1 = nterm exp (4.1-9: -1) $2 = token '\n' (4.10-5.0: ) -> $$ = nterm line (4.1-5.0: ) @@ -46172,16 +47819,19 @@ -> $$ = nterm input (1.1-5.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '(' (5.1: ) Shifting token '(' (5.1: ) Entering state 4 Stack now 0 6 4 +Return for a new token: Reading a token Next token is token '-' (5.2: ) Shifting token '-' (5.2: ) Entering state 2 Stack now 0 6 4 2 +Return for a new token: Reading a token Next token is token number (5.3: 1) Shifting token number (5.3: 1) @@ -46192,6 +47842,7 @@ -> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 +Return for a new token: Reading a token Next token is token ')' (5.4: ) Reducing stack by rule 11 (line 124): @@ -46211,11 +47862,13 @@ -> $$ = nterm exp (5.1-4: -1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '^' (5.5: ) Shifting token '^' (5.5: ) Entering state 24 Stack now 0 6 8 24 +Return for a new token: Reading a token Next token is token number (5.6: 2) Shifting token number (5.6: 2) @@ -46226,6 +47879,7 @@ -> $$ = nterm exp (5.6: 2) Entering state 33 Stack now 0 6 8 24 33 +Return for a new token: Reading a token Next token is token '=' (5.8: ) Reducing stack by rule 12 (line 125): @@ -46239,6 +47893,7 @@ Shifting token '=' (5.8: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (5.10: 1) Shifting token number (5.10: 1) @@ -46249,6 +47904,7 @@ -> $$ = nterm exp (5.10: 1) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (5.11-6.0: ) Reducing stack by rule 6 (line 102): @@ -46274,6 +47930,7 @@ -> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (6.1-7.0: ) Shifting token '\n' (6.1-7.0: ) @@ -46290,21 +47947,25 @@ -> $$ = nterm input (1.1-7.0: ) Entering state 6 
Stack now 0 6 +Return for a new token: Reading a token Next token is token '-' (7.1: ) Shifting token '-' (7.1: ) Entering state 2 Stack now 0 6 2 +Return for a new token: Reading a token Next token is token '-' (7.2: ) Shifting token '-' (7.2: ) Entering state 2 Stack now 0 6 2 2 +Return for a new token: Reading a token Next token is token '-' (7.3: ) Shifting token '-' (7.3: ) Entering state 2 Stack now 0 6 2 2 2 +Return for a new token: Reading a token Next token is token number (7.4: 1) Shifting token number (7.4: 1) @@ -46315,6 +47976,7 @@ -> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 +Return for a new token: Reading a token Next token is token '=' (7.6: ) Reducing stack by rule 11 (line 124): @@ -46341,11 +48003,13 @@ Shifting token '=' (7.6: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (7.8: ) Shifting token '-' (7.8: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (7.9: 1) Shifting token number (7.9: 1) @@ -46356,6 +48020,7 @@ -> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (7.10-8.0: ) Reducing stack by rule 11 (line 124): @@ -46388,6 +48053,7 @@ -> $$ = nterm input (1.1-8.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (8.1-9.0: ) Shifting token '\n' (8.1-9.0: ) @@ -46404,6 +48070,7 @@ -> $$ = nterm input (1.1-9.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (9.1: 1) Shifting token number (9.1: 1) @@ -46414,11 +48081,13 @@ -> $$ = nterm exp (9.1: 1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '-' (9.3: ) Shifting token '-' (9.3: ) Entering state 20 Stack now 0 6 8 20 +Return for a new token: Reading a token Next token is token number (9.5: 2) Shifting token number (9.5: 2) @@ -46429,6 +48098,7 @@ -> $$ = nterm exp (9.5: 2) Entering state 29 Stack now 0 6 8 20 29 +Return for a new token: Reading a token Next token is token '-' (9.7: ) Reducing stack by rule 8 (line 113): @@ -46442,6 +48112,7 @@ Shifting token '-' (9.7: ) Entering state 20 Stack now 0 6 8 20 +Return for a new token: Reading a token Next token is token number (9.9: 3) Shifting token number (9.9: 3) @@ -46452,6 +48123,7 @@ -> $$ = nterm exp (9.9: 3) Entering state 29 Stack now 0 6 8 20 29 +Return for a new token: Reading a token Next token is token '=' (9.11: ) Reducing stack by rule 8 (line 113): @@ -46465,11 +48137,13 @@ Shifting token '=' (9.11: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (9.13: ) Shifting token '-' (9.13: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (9.14: 4) Shifting token number (9.14: 4) @@ -46480,6 +48154,7 @@ -> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (9.15-10.0: ) Reducing stack by rule 11 (line 124): @@ -46512,6 +48187,7 @@ -> $$ = nterm input (1.1-10.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (10.1: 1) Shifting token number (10.1: 1) @@ -46522,16 +48198,19 @@ -> $$ = nterm exp (10.1: 1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '-' (10.3: ) Shifting token '-' (10.3: ) Entering state 
20 Stack now 0 6 8 20 +Return for a new token: Reading a token Next token is token '(' (10.5: ) Shifting token '(' (10.5: ) Entering state 4 Stack now 0 6 8 20 4 +Return for a new token: Reading a token Next token is token number (10.6: 2) Shifting token number (10.6: 2) @@ -46542,11 +48221,13 @@ -> $$ = nterm exp (10.6: 2) Entering state 12 Stack now 0 6 8 20 4 12 +Return for a new token: Reading a token Next token is token '-' (10.8: ) Shifting token '-' (10.8: ) Entering state 20 Stack now 0 6 8 20 4 12 20 +Return for a new token: Reading a token Next token is token number (10.10: 3) Shifting token number (10.10: 3) @@ -46557,6 +48238,7 @@ -> $$ = nterm exp (10.10: 3) Entering state 29 Stack now 0 6 8 20 4 12 20 29 +Return for a new token: Reading a token Next token is token ')' (10.11: ) Reducing stack by rule 8 (line 113): @@ -46577,6 +48259,7 @@ -> $$ = nterm exp (10.5-11: -1) Entering state 29 Stack now 0 6 8 20 29 +Return for a new token: Reading a token Next token is token '=' (10.13: ) Reducing stack by rule 8 (line 113): @@ -46590,6 +48273,7 @@ Shifting token '=' (10.13: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (10.15: 2) Shifting token number (10.15: 2) @@ -46600,6 +48284,7 @@ -> $$ = nterm exp (10.15: 2) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (10.16-11.0: ) Reducing stack by rule 6 (line 102): @@ -46625,6 +48310,7 @@ -> $$ = nterm input (1.1-11.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (11.1-12.0: ) Shifting token '\n' (11.1-12.0: ) @@ -46641,6 +48327,7 @@ -> $$ = nterm input (1.1-12.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (12.1: 2) Shifting token number (12.1: 2) @@ -46651,11 +48338,13 @@ -> $$ = nterm exp (12.1: 2) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '^' (12.2: ) Shifting token '^' (12.2: ) Entering state 24 Stack now 0 6 8 24 +Return for a new token: Reading a token Next token is token number (12.3: 2) Shifting token number (12.3: 2) @@ -46666,11 +48355,13 @@ -> $$ = nterm exp (12.3: 2) Entering state 33 Stack now 0 6 8 24 33 +Return for a new token: Reading a token Next token is token '^' (12.4: ) Shifting token '^' (12.4: ) Entering state 24 Stack now 0 6 8 24 33 24 +Return for a new token: Reading a token Next token is token number (12.5: 3) Shifting token number (12.5: 3) @@ -46681,6 +48372,7 @@ -> $$ = nterm exp (12.5: 3) Entering state 33 Stack now 0 6 8 24 33 24 33 +Return for a new token: Reading a token Next token is token '=' (12.7: ) Reducing stack by rule 12 (line 125): @@ -46702,6 +48394,7 @@ Shifting token '=' (12.7: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (12.9-11: 256) Shifting token number (12.9-11: 256) @@ -46712,6 +48405,7 @@ -> $$ = nterm exp (12.9-11: 256) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (12.12-13.0: ) Reducing stack by rule 6 (line 102): @@ -46737,11 +48431,13 @@ -> $$ = nterm input (1.1-13.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '(' (13.1: ) Shifting token '(' (13.1: ) Entering state 4 Stack now 0 6 4 +Return for a new token: Reading a token Next token is token number (13.2: 2) Shifting token number (13.2: 2) @@ -46752,11 +48448,13 @@ -> $$ = nterm 
exp (13.2: 2) Entering state 12 Stack now 0 6 4 12 +Return for a new token: Reading a token Next token is token '^' (13.3: ) Shifting token '^' (13.3: ) Entering state 24 Stack now 0 6 4 12 24 +Return for a new token: Reading a token Next token is token number (13.4: 2) Shifting token number (13.4: 2) @@ -46767,6 +48465,7 @@ -> $$ = nterm exp (13.4: 2) Entering state 33 Stack now 0 6 4 12 24 33 +Return for a new token: Reading a token Next token is token ')' (13.5: ) Reducing stack by rule 12 (line 125): @@ -46787,11 +48486,13 @@ -> $$ = nterm exp (13.1-5: 4) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '^' (13.6: ) Shifting token '^' (13.6: ) Entering state 24 Stack now 0 6 8 24 +Return for a new token: Reading a token Next token is token number (13.7: 3) Shifting token number (13.7: 3) @@ -46802,6 +48503,7 @@ -> $$ = nterm exp (13.7: 3) Entering state 33 Stack now 0 6 8 24 33 +Return for a new token: Reading a token Next token is token '=' (13.9: ) Reducing stack by rule 12 (line 125): @@ -46815,6 +48517,7 @@ Shifting token '=' (13.9: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (13.11-12: 64) Shifting token number (13.11-12: 64) @@ -46825,6 +48528,7 @@ -> $$ = nterm exp (13.11-12: 64) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (13.13-14.0: ) Reducing stack by rule 6 (line 102): @@ -46850,6 +48554,7 @@ -> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (14.1: ) @@ -46858,9 +48563,13 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (# + 1) = 1111 +./calc.at:1354: $PREPARSER ./calc input +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1357: cat stderr Starting parse Entering state 0 Stack now 0 @@ -46874,11 +48583,13 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -46889,11 +48600,13 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 +Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -46904,6 +48617,7 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token Next token is token '=' (1.11: ) Reducing stack by rule 9 (line 114): @@ -46925,6 +48639,7 @@ Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.13: 7) Shifting token number (1.13: 7) @@ -46935,6 +48650,7 @@ -> $$ = nterm exp (1.13: 7) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.14-2.0: ) Reducing stack by rule 6 (line 102): @@ -46959,6 +48675,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 
0 6 +Return for a new token: Reading a token Next token is token number (2.1: 1) Shifting token number (2.1: 1) @@ -46969,11 +48686,13 @@ -> $$ = nterm exp (2.1: 1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '+' (2.3: ) Shifting token '+' (2.3: ) Entering state 21 Stack now 0 6 8 21 +Return for a new token: Reading a token Next token is token number (2.5: 2) Shifting token number (2.5: 2) @@ -46984,16 +48703,19 @@ -> $$ = nterm exp (2.5: 2) Entering state 30 Stack now 0 6 8 21 30 +Return for a new token: Reading a token Next token is token '*' (2.7: ) Shifting token '*' (2.7: ) Entering state 22 Stack now 0 6 8 21 30 22 +Return for a new token: Reading a token Next token is token '-' (2.9: ) Shifting token '-' (2.9: ) Entering state 2 Stack now 0 6 8 21 30 22 2 +Return for a new token: Reading a token Next token is token number (2.10: 3) Shifting token number (2.10: 3) @@ -47004,6 +48726,7 @@ -> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 21 30 22 2 10 +Return for a new token: Reading a token Next token is token '=' (2.12: ) Reducing stack by rule 11 (line 124): @@ -47032,11 +48755,13 @@ Shifting token '=' (2.12: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (2.14: ) Shifting token '-' (2.14: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (2.15: 5) Shifting token number (2.15: 5) @@ -47047,6 +48772,7 @@ -> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (2.16-3.0: ) Reducing stack by rule 11 (line 124): @@ -47079,6 +48805,7 @@ -> $$ = nterm input (1.1-3.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (3.1-4.0: ) Shifting token '\n' (3.1-4.0: ) @@ -47095,11 +48822,13 @@ -> $$ = nterm input (1.1-4.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '-' (4.1: ) Shifting token '-' (4.1: ) Entering state 2 Stack now 0 6 2 +Return for a new token: Reading a token Next token is token number (4.2: 1) Shifting token number (4.2: 1) @@ -47110,11 +48839,13 @@ -> $$ = nterm exp (4.2: 1) Entering state 10 Stack now 0 6 2 10 +Return for a new token: Reading a token Next token is token '^' (4.3: ) Shifting token '^' (4.3: ) Entering state 24 Stack now 0 6 2 10 24 +Return for a new token: Reading a token Next token is token number (4.4: 2) Shifting token number (4.4: 2) @@ -47125,6 +48856,7 @@ -> $$ = nterm exp (4.4: 2) Entering state 33 Stack now 0 6 2 10 24 33 +Return for a new token: Reading a token Next token is token '=' (4.6: ) Reducing stack by rule 12 (line 125): @@ -47145,11 +48877,13 @@ Shifting token '=' (4.6: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (4.8: ) Shifting token '-' (4.8: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (4.9: 1) Shifting token number (4.9: 1) @@ -47160,6 +48894,7 @@ -> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (4.10-5.0: ) Reducing stack by rule 11 (line 124): @@ -47180,121 +48915,7 @@ Shifting token '\n' (4.10-5.0: ) Entering state 25 Stack now 0 6 8 25 -Reducing stack by Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) 
-Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -rule 4 (line 97): $1 = nterm exp (4.1-9: -1) $2 = token '\n' (4.10-5.0: ) -> $$ = nterm line (4.1-5.0: ) @@ -47306,16 +48927,19 @@ -> $$ = nterm input (1.1-5.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '(' (5.1: ) Shifting token '(' (5.1: ) Entering state 4 Stack now 0 6 4 +Return for a new token: Reading a token Next token is token '-' (5.2: ) Shifting token '-' (5.2: ) Entering state 2 Stack now 0 6 4 2 +Return for a new token: Reading a token Next token is token number (5.3: 1) Shifting token number (5.3: 1) @@ -47326,6 +48950,7 @@ -> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 +Return for a new token: Reading a token Next token is token ')' (5.4: ) Reducing stack by rule 11 (line 124): @@ -47345,11 +48970,13 @@ -> $$ = nterm exp (5.1-4: -1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '^' (5.5: ) Shifting token '^' (5.5: ) Entering state 24 Stack now 0 6 8 24 +Return for a new token: Reading a token Next token is token number (5.6: 2) Shifting token number (5.6: 2) @@ -47360,6 +48987,7 @@ -> $$ = nterm exp (5.6: 2) Entering state 33 Stack now 0 6 8 24 33 +Return for a new token: Reading a token Next token is token '=' (5.8: ) Reducing stack by rule 12 (line 125): @@ -47373,6 +49001,7 @@ Shifting token '=' (5.8: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (5.10: 1) Shifting token number (5.10: 1) @@ -47383,6 +49012,7 @@ -> $$ = nterm exp (5.10: 1) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (5.11-6.0: ) Reducing stack by rule 6 (line 102): @@ -47408,6 +49038,7 @@ -> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (6.1-7.0: ) Shifting token '\n' (6.1-7.0: ) @@ -47424,21 +49055,25 @@ -> $$ = nterm input (1.1-7.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '-' (7.1: ) Shifting token '-' (7.1: ) Entering state 2 Stack now 0 6 2 +Return for a new token: Reading a token Next token is token '-' (7.2: ) Shifting token '-' (7.2: ) Entering state 2 Stack now 0 6 2 2 +Return for a new token: Reading a token Next token is token '-' (7.3: ) Shifting token '-' (7.3: ) Entering state 2 Stack now 0 6 2 2 2 +Return for a new token: Reading a token Next token is token number (7.4: 1) Shifting token number (7.4: 1) @@ -47449,6 +49084,7 @@ -> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 +Return for a new token: Reading a token Next token is token '=' (7.6: ) Reducing stack by rule 11 (line 124): @@ -47475,11 +49111,13 @@ Shifting token '=' (7.6: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (7.8: ) Shifting token '-' (7.8: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (7.9: 1) Shifting token number (7.9: 1) @@ -47490,6 +49128,7 @@ -> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (7.10-8.0: ) Reducing stack by rule 11 (line 124): @@ -47522,6 +49161,7 @@ -> $$ = nterm input (1.1-8.0: ) Entering state 6 Stack now 0 6 +Return for a new token: 
Reading a token Next token is token '\n' (8.1-9.0: ) Shifting token '\n' (8.1-9.0: ) @@ -47538,6 +49178,7 @@ -> $$ = nterm input (1.1-9.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (9.1: 1) Shifting token number (9.1: 1) @@ -47548,11 +49189,13 @@ -> $$ = nterm exp (9.1: 1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '-' (9.3: ) Shifting token '-' (9.3: ) Entering state 20 Stack now 0 6 8 20 +Return for a new token: Reading a token Next token is token number (9.5: 2) Shifting token number (9.5: 2) @@ -47563,6 +49206,7 @@ -> $$ = nterm exp (9.5: 2) Entering state 29 Stack now 0 6 8 20 29 +Return for a new token: Reading a token Next token is token '-' (9.7: ) Reducing stack by rule 8 (line 113): @@ -47576,6 +49220,7 @@ Shifting token '-' (9.7: ) Entering state 20 Stack now 0 6 8 20 +Return for a new token: Reading a token Next token is token number (9.9: 3) Shifting token number (9.9: 3) @@ -47586,6 +49231,7 @@ -> $$ = nterm exp (9.9: 3) Entering state 29 Stack now 0 6 8 20 29 +Return for a new token: Reading a token Next token is token '=' (9.11: ) Reducing stack by rule 8 (line 113): @@ -47599,11 +49245,13 @@ Shifting token '=' (9.11: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (9.13: ) Shifting token '-' (9.13: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (9.14: 4) Shifting token number (9.14: 4) @@ -47614,6 +49262,7 @@ -> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (9.15-10.0: ) Reducing stack by rule 11 (line 124): @@ -47646,6 +49295,7 @@ -> $$ = nterm input (1.1-10.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (10.1: 1) Shifting token number (10.1: 1) @@ -47656,16 +49306,19 @@ -> $$ = nterm exp (10.1: 1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '-' (10.3: ) Shifting token '-' (10.3: ) Entering state 20 Stack now 0 6 8 20 +Return for a new token: Reading a token Next token is token '(' (10.5: ) Shifting token '(' (10.5: ) Entering state 4 Stack now 0 6 8 20 4 +Return for a new token: Reading a token Next token is token number (10.6: 2) Shifting token number (10.6: 2) @@ -47676,11 +49329,13 @@ -> $$ = nterm exp (10.6: 2) Entering state 12 Stack now 0 6 8 20 4 12 +Return for a new token: Reading a token Next token is token '-' (10.8: ) Shifting token '-' (10.8: ) Entering state 20 Stack now 0 6 8 20 4 12 20 +Return for a new token: Reading a token Next token is token number (10.10: 3) Shifting token number (10.10: 3) @@ -47691,6 +49346,7 @@ -> $$ = nterm exp (10.10: 3) Entering state 29 Stack now 0 6 8 20 4 12 20 29 +Return for a new token: Reading a token Next token is token ')' (10.11: ) Reducing stack by rule 8 (line 113): @@ -47711,6 +49367,7 @@ -> $$ = nterm exp (10.5-11: -1) Entering state 29 Stack now 0 6 8 20 29 +Return for a new token: Reading a token Next token is token '=' (10.13: ) Reducing stack by rule 8 (line 113): @@ -47724,6 +49381,7 @@ Shifting token '=' (10.13: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (10.15: 2) Shifting token number (10.15: 2) @@ -47734,6 +49392,7 @@ -> $$ = nterm exp (10.15: 2) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next 
token is token '\n' (10.16-11.0: ) Reducing stack by rule 6 (line 102): @@ -47759,6 +49418,7 @@ -> $$ = nterm input (1.1-11.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (11.1-12.0: ) Shifting token '\n' (11.1-12.0: ) @@ -47775,6 +49435,7 @@ -> $$ = nterm input (1.1-12.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (12.1: 2) Shifting token number (12.1: 2) @@ -47785,11 +49446,13 @@ -> $$ = nterm exp (12.1: 2) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '^' (12.2: ) Shifting token '^' (12.2: ) Entering state 24 Stack now 0 6 8 24 +Return for a new token: Reading a token Next token is token number (12.3: 2) Shifting token number (12.3: 2) @@ -47800,11 +49463,13 @@ -> $$ = nterm exp (12.3: 2) Entering state 33 Stack now 0 6 8 24 33 +Return for a new token: Reading a token Next token is token '^' (12.4: ) Shifting token '^' (12.4: ) Entering state 24 Stack now 0 6 8 24 33 24 +Return for a new token: Reading a token Next token is token number (12.5: 3) Shifting token number (12.5: 3) @@ -47815,6 +49480,7 @@ -> $$ = nterm exp (12.5: 3) Entering state 33 Stack now 0 6 8 24 33 24 33 +Return for a new token: Reading a token Next token is token '=' (12.7: ) Reducing stack by rule 12 (line 125): @@ -47836,6 +49502,7 @@ Shifting token '=' (12.7: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (12.9-11: 256) Shifting token number (12.9-11: 256) @@ -47846,6 +49513,7 @@ -> $$ = nterm exp (12.9-11: 256) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (12.12-13.0: ) Reducing stack by rule 6 (line 102): @@ -47871,11 +49539,13 @@ -> $$ = nterm input (1.1-13.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '(' (13.1: ) Shifting token '(' (13.1: ) Entering state 4 Stack now 0 6 4 +Return for a new token: Reading a token Next token is token number (13.2: 2) Shifting token number (13.2: 2) @@ -47886,11 +49556,13 @@ -> $$ = nterm exp (13.2: 2) Entering state 12 Stack now 0 6 4 12 +Return for a new token: Reading a token Next token is token '^' (13.3: ) Shifting token '^' (13.3: ) Entering state 24 Stack now 0 6 4 12 24 +Return for a new token: Reading a token Next token is token number (13.4: 2) Shifting token number (13.4: 2) @@ -47901,6 +49573,7 @@ -> $$ = nterm exp (13.4: 2) Entering state 33 Stack now 0 6 4 12 24 33 +Return for a new token: Reading a token Next token is token ')' (13.5: ) Reducing stack by rule 12 (line 125): @@ -47921,11 +49594,13 @@ -> $$ = nterm exp (13.1-5: 4) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '^' (13.6: ) Shifting token '^' (13.6: ) Entering state 24 Stack now 0 6 8 24 +Return for a new token: Reading a token Next token is token number (13.7: 3) Shifting token number (13.7: 3) @@ -47936,6 +49611,7 @@ -> $$ = nterm exp (13.7: 3) Entering state 33 Stack now 0 6 8 24 33 +Return for a new token: Reading a token Next token is token '=' (13.9: ) Reducing stack by rule 12 (line 125): @@ -47949,6 +49625,7 @@ Shifting token '=' (13.9: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (13.11-12: 64) Shifting token number (13.11-12: 64) @@ -47959,6 +49636,7 @@ -> $$ = nterm exp (13.11-12: 64) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: 
Reading a token Next token is token '\n' (13.13-14.0: ) Reducing stack by rule 6 (line 102): @@ -47984,6 +49662,7 @@ -> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (14.1: ) @@ -47992,59 +49671,216 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1357: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: -./calc.at:1355: cat stderr - | 1 2 -./calc.at:1357: $PREPARSER ./calc input -./calc.at:1354: cat stderr + | + | +1 +======== Testing with C++ standard flags: '' stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + 
$2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +./calc.at:1355: $PREPARSER ./calc input +stdout: | | +1 -./calc.at:1355: $PREPARSER ./calc input -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +input: +./calc.at:1357: $PREPARSER ./calc input +./calc.at:1360: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' stderr: + | 1 2 +./calc.at:1358: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: Starting parse Entering state 0 @@ -48071,7 +49907,45 @@ Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 ./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -48085,6 +49959,7 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token number (1.3: 2) 1.3: syntax error, unexpected number @@ -48092,6 +49967,44 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 stderr: Starting parse Entering state 0 @@ -48118,9 +50031,64 @@ Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +input: +./calc.at:1354: cat stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1360: $PREPARSER ./calc input +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: - | (1 + 1) / (1 - 1) -./calc.at:1354: $PREPARSER ./calc input ./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -48131,6 +50099,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1355: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1358: cat stderr +./calc.at:1355: $PREPARSER ./calc /dev/null +stderr: stderr: Starting parse Entering state 0 @@ -48156,107 +50130,80 @@ Entering state 21 Stack now 0 4 12 21 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number 
(1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 121): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -48273,20 +50220,20 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1357: cat stderr -stderr: +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +stderr: +./calc.at:1357: $PREPARSER ./calc /dev/null Starting parse Entering state 0 Stack now 0 @@ -48311,107 +50258,80 @@ Entering state 21 Stack now 0 4 12 21 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 
-Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 121): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -48428,50 +50348,19 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: | 1//2 -./calc.at:1357: $PREPARSER ./calc input +./calc.at:1358: $PREPARSER ./calc input stderr: -./calc.at:1355: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) Stack now 0 -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1355: $PREPARSER ./calc /dev/null stderr: -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -48480,8 +50369,6 @@ 1.1: syntax error, unexpected end of file Cleanup: discarding lookahead token end of file (1.1: ) Stack now 0 -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Stack now 0 @@ -48496,380 +50383,82 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1354: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -485. calc.at:1354: ok -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1357: cat stderr -./calc.at:1355: cat stderr -input: - - | error -./calc.at:1357: $PREPARSER ./calc input -input: -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1355: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 -Stack now 0 8 21 4 12 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 21 4 12 22 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 31 -Stack now 0 8 21 4 12 22 31 
-Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (1.14-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -48879,2856 +50468,20 @@ Entering state 6 Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token 
'*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: cat stderr -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 = 2 = 3 -./calc.at:1357: $PREPARSER ./calc input -./calc.at:1355: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: - | (!!) + (1 2) = 1 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1355: $PREPARSER ./calc input -stderr: -490. calc.at:1362: testing Calculator %no-lines api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 141): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -stderr: -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 141): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): 
- $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1357: cat stderr -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: cat stderr -input: - | - | +1 -./calc.at:1357: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -input: -stdout: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 - | (- *) + (1 2) = 1 -./calc.at:1355: $PREPARSER ./calc input -./types.at:139: $PREPARSER ./test -stderr: -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 140): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token 
'\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: cat stderr -======== Testing with C++ standard flags: '' -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1357: $PREPARSER ./calc /dev/null -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 140): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 
21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: cat stderr -stderr: -input: - | (* *) + (*) + (*) -./calc.at:1355: $PREPARSER ./calc input -stdout: -./calc.at:1357: cat stderr -input: -stderr: -./calc.at:1362: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1358: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1357: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: cat stderr -input: -./calc.at:1357: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1355: $PREPARSER ./calc input -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 142): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | (!!) + (1 2) = 1 -./calc.at:1357: $PREPARSER ./calc input -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 142): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1355: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | 1 + 2 * 3 + !- ++ -stderr: -stdout: -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: ./check -./calc.at:1357: cat stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1355: $PREPARSER ./calc input -input: -stderr: - | (- *) + (1 2) = 1 -./calc.at:1357: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 143): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Return for a new token: -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Return for a new token: -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: -Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 -Return for a new token: -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 21 -Stack now 0 6 8 21 -Return for a new token: +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Stack now 0 6 8 21 Reading a token Next token is token number (2.5: 2) Shifting token number (2.5: 2) @@ -51739,19 
+50492,16 @@ -> $$ = nterm exp (2.5: 2) Entering state 30 Stack now 0 6 8 21 30 -Return for a new token: Reading a token Next token is token '*' (2.7: ) Shifting token '*' (2.7: ) Entering state 22 Stack now 0 6 8 21 30 22 -Return for a new token: Reading a token Next token is token '-' (2.9: ) Shifting token '-' (2.9: ) Entering state 2 Stack now 0 6 8 21 30 22 2 -Return for a new token: Reading a token Next token is token number (2.10: 3) Shifting token number (2.10: 3) @@ -51762,7 +50512,6 @@ -> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 21 30 22 2 10 -Return for a new token: Reading a token Next token is token '=' (2.12: ) Reducing stack by rule 11 (line 124): @@ -51791,13 +50540,11 @@ Shifting token '=' (2.12: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token '-' (2.14: ) Shifting token '-' (2.14: ) Entering state 2 Stack now 0 6 8 19 2 -Return for a new token: Reading a token Next token is token number (2.15: 5) Shifting token number (2.15: 5) @@ -51808,7 +50555,6 @@ -> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 19 2 10 -Return for a new token: Reading a token Next token is token '\n' (2.16-3.0: ) Reducing stack by rule 11 (line 124): @@ -51841,7 +50587,6 @@ -> $$ = nterm input (1.1-3.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '\n' (3.1-4.0: ) Shifting token '\n' (3.1-4.0: ) @@ -51858,13 +50603,11 @@ -> $$ = nterm input (1.1-4.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '-' (4.1: ) Shifting token '-' (4.1: ) Entering state 2 Stack now 0 6 2 -Return for a new token: Reading a token Next token is token number (4.2: 1) Shifting token number (4.2: 1) @@ -51875,13 +50618,11 @@ -> $$ = nterm exp (4.2: 1) Entering state 10 Stack now 0 6 2 10 -Return for a new token: Reading a token Next token is token '^' (4.3: ) Shifting token '^' (4.3: ) Entering state 24 Stack now 0 6 2 10 24 -Return for a new token: Reading a token Next token is token number (4.4: 2) Shifting token number (4.4: 2) @@ -51892,7 +50633,6 @@ -> $$ = nterm exp (4.4: 2) Entering state 33 Stack now 0 6 2 10 24 33 -Return for a new token: Reading a token Next token is token '=' (4.6: ) Reducing stack by rule 12 (line 125): @@ -51913,13 +50653,11 @@ Shifting token '=' (4.6: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token '-' (4.8: ) Shifting token '-' (4.8: ) Entering state 2 Stack now 0 6 8 19 2 -Return for a new token: Reading a token Next token is token number (4.9: 1) Shifting token number (4.9: 1) @@ -51930,7 +50668,6 @@ -> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 -Return for a new token: Reading a token Next token is token '\n' (4.10-5.0: ) Reducing stack by rule 11 (line 124): @@ -51963,19 +50700,16 @@ -> $$ = nterm input (1.1-5.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '(' (5.1: ) Shifting token '(' (5.1: ) Entering state 4 Stack now 0 6 4 -Return for a new token: Reading a token Next token is token '-' (5.2: ) Shifting token '-' (5.2: ) Entering state 2 Stack now 0 6 4 2 -Return for a new token: Reading a token Next token is token number (5.3: 1) Shifting token number (5.3: 1) @@ -51986,7 +50720,6 @@ -> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 -Return for a new token: Reading a token Next token is token ')' (5.4: ) Reducing stack by rule 11 (line 124): @@ -52006,13 +50739,11 @@ -> $$ 
= nterm exp (5.1-4: -1) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '^' (5.5: ) Shifting token '^' (5.5: ) Entering state 24 Stack now 0 6 8 24 -Return for a new token: Reading a token Next token is token number (5.6: 2) Shifting token number (5.6: 2) @@ -52023,7 +50754,6 @@ -> $$ = nterm exp (5.6: 2) Entering state 33 Stack now 0 6 8 24 33 -Return for a new token: Reading a token Next token is token '=' (5.8: ) Reducing stack by rule 12 (line 125): @@ -52037,7 +50767,6 @@ Shifting token '=' (5.8: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token number (5.10: 1) Shifting token number (5.10: 1) @@ -52048,7 +50777,6 @@ -> $$ = nterm exp (5.10: 1) Entering state 28 Stack now 0 6 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (5.11-6.0: ) Reducing stack by rule 6 (line 102): @@ -52074,7 +50802,6 @@ -> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '\n' (6.1-7.0: ) Shifting token '\n' (6.1-7.0: ) @@ -52091,25 +50818,21 @@ -> $$ = nterm input (1.1-7.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '-' (7.1: ) Shifting token '-' (7.1: ) Entering state 2 Stack now 0 6 2 -Return for a new token: Reading a token Next token is token '-' (7.2: ) Shifting token '-' (7.2: ) Entering state 2 Stack now 0 6 2 2 -Return for a new token: Reading a token Next token is token '-' (7.3: ) Shifting token '-' (7.3: ) Entering state 2 Stack now 0 6 2 2 2 -Return for a new token: Reading a token Next token is token number (7.4: 1) Shifting token number (7.4: 1) @@ -52120,7 +50843,6 @@ -> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 -Return for a new token: Reading a token Next token is token '=' (7.6: ) Reducing stack by rule 11 (line 124): @@ -52147,13 +50869,11 @@ Shifting token '=' (7.6: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token '-' (7.8: ) Shifting token '-' (7.8: ) Entering state 2 Stack now 0 6 8 19 2 -Return for a new token: Reading a token Next token is token number (7.9: 1) Shifting token number (7.9: 1) @@ -52164,7 +50884,6 @@ -> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 -Return for a new token: Reading a token Next token is token '\n' (7.10-8.0: ) Reducing stack by rule 11 (line 124): @@ -52197,7 +50916,6 @@ -> $$ = nterm input (1.1-8.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '\n' (8.1-9.0: ) Shifting token '\n' (8.1-9.0: ) @@ -52214,7 +50932,6 @@ -> $$ = nterm input (1.1-9.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token number (9.1: 1) Shifting token number (9.1: 1) @@ -52225,13 +50942,11 @@ -> $$ = nterm exp (9.1: 1) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '-' (9.3: ) Shifting token '-' (9.3: ) Entering state 20 Stack now 0 6 8 20 -Return for a new token: Reading a token Next token is token number (9.5: 2) Shifting token number (9.5: 2) @@ -52242,7 +50957,6 @@ -> $$ = nterm exp (9.5: 2) Entering state 29 Stack now 0 6 8 20 29 -Return for a new token: Reading a token Next token is token '-' (9.7: ) Reducing stack by rule 8 (line 113): @@ -52256,7 +50970,6 @@ Shifting token '-' (9.7: ) Entering state 20 Stack now 0 6 8 20 -Return for a new token: Reading a token Next token is token number (9.9: 3) 
Shifting token number (9.9: 3) @@ -52267,7 +50980,6 @@ -> $$ = nterm exp (9.9: 3) Entering state 29 Stack now 0 6 8 20 29 -Return for a new token: Reading a token Next token is token '=' (9.11: ) Reducing stack by rule 8 (line 113): @@ -52281,13 +50993,11 @@ Shifting token '=' (9.11: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token '-' (9.13: ) Shifting token '-' (9.13: ) Entering state 2 Stack now 0 6 8 19 2 -Return for a new token: Reading a token Next token is token number (9.14: 4) Shifting token number (9.14: 4) @@ -52298,7 +51008,6 @@ -> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 19 2 10 -Return for a new token: Reading a token Next token is token '\n' (9.15-10.0: ) Reducing stack by rule 11 (line 124): @@ -52331,7 +51040,6 @@ -> $$ = nterm input (1.1-10.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token number (10.1: 1) Shifting token number (10.1: 1) @@ -52342,19 +51050,16 @@ -> $$ = nterm exp (10.1: 1) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '-' (10.3: ) Shifting token '-' (10.3: ) Entering state 20 Stack now 0 6 8 20 -Return for a new token: Reading a token Next token is token '(' (10.5: ) Shifting token '(' (10.5: ) Entering state 4 Stack now 0 6 8 20 4 -Return for a new token: Reading a token Next token is token number (10.6: 2) Shifting token number (10.6: 2) @@ -52365,13 +51070,11 @@ -> $$ = nterm exp (10.6: 2) Entering state 12 Stack now 0 6 8 20 4 12 -Return for a new token: Reading a token Next token is token '-' (10.8: ) Shifting token '-' (10.8: ) Entering state 20 Stack now 0 6 8 20 4 12 20 -Return for a new token: Reading a token Next token is token number (10.10: 3) Shifting token number (10.10: 3) @@ -52382,7 +51085,6 @@ -> $$ = nterm exp (10.10: 3) Entering state 29 Stack now 0 6 8 20 4 12 20 29 -Return for a new token: Reading a token Next token is token ')' (10.11: ) Reducing stack by rule 8 (line 113): @@ -52403,7 +51105,6 @@ -> $$ = nterm exp (10.5-11: -1) Entering state 29 Stack now 0 6 8 20 29 -Return for a new token: Reading a token Next token is token '=' (10.13: ) Reducing stack by rule 8 (line 113): @@ -52417,7 +51118,6 @@ Shifting token '=' (10.13: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token number (10.15: 2) Shifting token number (10.15: 2) @@ -52428,7 +51128,6 @@ -> $$ = nterm exp (10.15: 2) Entering state 28 Stack now 0 6 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (10.16-11.0: ) Reducing stack by rule 6 (line 102): @@ -52454,7 +51153,6 @@ -> $$ = nterm input (1.1-11.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '\n' (11.1-12.0: ) Shifting token '\n' (11.1-12.0: ) @@ -52471,7 +51169,6 @@ -> $$ = nterm input (1.1-12.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token number (12.1: 2) Shifting token number (12.1: 2) @@ -52482,13 +51179,11 @@ -> $$ = nterm exp (12.1: 2) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '^' (12.2: ) Shifting token '^' (12.2: ) Entering state 24 Stack now 0 6 8 24 -Return for a new token: Reading a token Next token is token number (12.3: 2) Shifting token number (12.3: 2) @@ -52499,13 +51194,11 @@ -> $$ = nterm exp (12.3: 2) Entering state 33 Stack now 0 6 8 24 33 -Return for a new token: Reading a token Next token is token '^' (12.4: ) 
Shifting token '^' (12.4: ) Entering state 24 Stack now 0 6 8 24 33 24 -Return for a new token: Reading a token Next token is token number (12.5: 3) Shifting token number (12.5: 3) @@ -52516,7 +51209,6 @@ -> $$ = nterm exp (12.5: 3) Entering state 33 Stack now 0 6 8 24 33 24 33 -Return for a new token: Reading a token Next token is token '=' (12.7: ) Reducing stack by rule 12 (line 125): @@ -52538,7 +51230,6 @@ Shifting token '=' (12.7: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token number (12.9-11: 256) Shifting token number (12.9-11: 256) @@ -52549,7 +51240,6 @@ -> $$ = nterm exp (12.9-11: 256) Entering state 28 Stack now 0 6 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (12.12-13.0: ) Reducing stack by rule 6 (line 102): @@ -52575,13 +51265,11 @@ -> $$ = nterm input (1.1-13.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '(' (13.1: ) Shifting token '(' (13.1: ) Entering state 4 Stack now 0 6 4 -Return for a new token: Reading a token Next token is token number (13.2: 2) Shifting token number (13.2: 2) @@ -52592,13 +51280,11 @@ -> $$ = nterm exp (13.2: 2) Entering state 12 Stack now 0 6 4 12 -Return for a new token: Reading a token Next token is token '^' (13.3: ) Shifting token '^' (13.3: ) Entering state 24 Stack now 0 6 4 12 24 -Return for a new token: Reading a token Next token is token number (13.4: 2) Shifting token number (13.4: 2) @@ -52609,7 +51295,6 @@ -> $$ = nterm exp (13.4: 2) Entering state 33 Stack now 0 6 4 12 24 33 -Return for a new token: Reading a token Next token is token ')' (13.5: ) Reducing stack by rule 12 (line 125): @@ -52630,13 +51315,11 @@ -> $$ = nterm exp (13.1-5: 4) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '^' (13.6: ) Shifting token '^' (13.6: ) Entering state 24 Stack now 0 6 8 24 -Return for a new token: Reading a token Next token is token number (13.7: 3) Shifting token number (13.7: 3) @@ -52647,7 +51330,6 @@ -> $$ = nterm exp (13.7: 3) Entering state 33 Stack now 0 6 8 24 33 -Return for a new token: Reading a token Next token is token '=' (13.9: ) Reducing stack by rule 12 (line 125): @@ -52661,7 +51343,6 @@ Shifting token '=' (13.9: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token number (13.11-12: 64) Shifting token number (13.11-12: 64) @@ -52672,7 +51353,6 @@ -> $$ = nterm exp (13.11-12: 64) Entering state 28 Stack now 0 6 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (13.13-14.0: ) Reducing stack by rule 6 (line 102): @@ -52698,7 +51378,6 @@ -> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (14.1: ) @@ -52707,157 +51386,49 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering 
state 28 -Stack now 0 8 19 28 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Return for a new token: Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -52872,222 +51443,34 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Return for a new token: Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 143): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -Starting parse -Entering state 0 +Error: popping nterm exp (1.1: 1) Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = 
nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +stderr: +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -53101,13 +51484,11 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -53118,13 +51499,11 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 -Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -53135,7 +51514,6 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 -Return for a new token: Reading a token Next token is token '=' (1.11: ) Reducing stack by rule 9 (line 114): @@ -53157,7 +51535,6 @@ Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token Next token is token number (1.13: 7) Shifting token number (1.13: 7) @@ -53168,7 +51545,6 @@ -> $$ = nterm exp (1.13: 7) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (1.14-2.0: ) Reducing stack by rule 6 (line 102): @@ -53193,7 +51569,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token number (2.1: 1) Shifting token number (2.1: 1) @@ -53204,13 +51579,11 @@ -> $$ = nterm exp (2.1: 1) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '+' (2.3: ) Shifting token '+' (2.3: ) Entering state 21 Stack now 0 6 8 21 -Return for a new token: Reading a token Next token is token number (2.5: 2) Shifting token number (2.5: 2) @@ -53221,19 +51594,16 @@ -> $$ = nterm exp (2.5: 2) Entering state 30 Stack now 0 6 8 21 30 -Return for a new token: Reading a token Next token is token '*' (2.7: ) Shifting token '*' (2.7: ) Entering state 22 Stack now 0 6 8 21 30 22 -Return for a new token: Reading a 
token Next token is token '-' (2.9: ) Shifting token '-' (2.9: ) Entering state 2 Stack now 0 6 8 21 30 22 2 -Return for a new token: Reading a token Next token is token number (2.10: 3) Shifting token number (2.10: 3) @@ -53244,7 +51614,6 @@ -> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 21 30 22 2 10 -Return for a new token: Reading a token Next token is token '=' (2.12: ) Reducing stack by rule 11 (line 124): @@ -53273,13 +51642,11 @@ Shifting token '=' (2.12: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token '-' (2.14: ) Shifting token '-' (2.14: ) Entering state 2 Stack now 0 6 8 19 2 -Return for a new token: Reading a token Next token is token number (2.15: 5) Shifting token number (2.15: 5) @@ -53290,7 +51657,6 @@ -> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 19 2 10 -Return for a new token: Reading a token Next token is token '\n' (2.16-3.0: ) Reducing stack by rule 11 (line 124): @@ -53323,7 +51689,6 @@ -> $$ = nterm input (1.1-3.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '\n' (3.1-4.0: ) Shifting token '\n' (3.1-4.0: ) @@ -53340,13 +51705,11 @@ -> $$ = nterm input (1.1-4.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '-' (4.1: ) Shifting token '-' (4.1: ) Entering state 2 Stack now 0 6 2 -Return for a new token: Reading a token Next token is token number (4.2: 1) Shifting token number (4.2: 1) @@ -53357,13 +51720,11 @@ -> $$ = nterm exp (4.2: 1) Entering state 10 Stack now 0 6 2 10 -Return for a new token: Reading a token Next token is token '^' (4.3: ) Shifting token '^' (4.3: ) Entering state 24 Stack now 0 6 2 10 24 -Return for a new token: Reading a token Next token is token number (4.4: 2) Shifting token number (4.4: 2) @@ -53374,7 +51735,6 @@ -> $$ = nterm exp (4.4: 2) Entering state 33 Stack now 0 6 2 10 24 33 -Return for a new token: Reading a token Next token is token '=' (4.6: ) Reducing stack by rule 12 (line 125): @@ -53395,13 +51755,11 @@ Shifting token '=' (4.6: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token '-' (4.8: ) Shifting token '-' (4.8: ) Entering state 2 Stack now 0 6 8 19 2 -Return for a new token: Reading a token Next token is token number (4.9: 1) Shifting token number (4.9: 1) @@ -53412,7 +51770,6 @@ -> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 -Return for a new token: Reading a token Next token is token '\n' (4.10-5.0: ) Reducing stack by rule 11 (line 124): @@ -53445,19 +51802,16 @@ -> $$ = nterm input (1.1-5.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '(' (5.1: ) Shifting token '(' (5.1: ) Entering state 4 Stack now 0 6 4 -Return for a new token: Reading a token Next token is token '-' (5.2: ) Shifting token '-' (5.2: ) Entering state 2 Stack now 0 6 4 2 -Return for a new token: Reading a token Next token is token number (5.3: 1) Shifting token number (5.3: 1) @@ -53468,7 +51822,6 @@ -> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 -Return for a new token: Reading a token Next token is token ')' (5.4: ) Reducing stack by rule 11 (line 124): @@ -53488,13 +51841,11 @@ -> $$ = nterm exp (5.1-4: -1) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '^' (5.5: ) Shifting token '^' (5.5: ) Entering state 24 Stack now 0 6 8 24 -Return for a new token: Reading a token Next token is token 
number (5.6: 2) Shifting token number (5.6: 2) @@ -53505,7 +51856,6 @@ -> $$ = nterm exp (5.6: 2) Entering state 33 Stack now 0 6 8 24 33 -Return for a new token: Reading a token Next token is token '=' (5.8: ) Reducing stack by rule 12 (line 125): @@ -53519,7 +51869,6 @@ Shifting token '=' (5.8: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token number (5.10: 1) Shifting token number (5.10: 1) @@ -53530,7 +51879,6 @@ -> $$ = nterm exp (5.10: 1) Entering state 28 Stack now 0 6 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (5.11-6.0: ) Reducing stack by rule 6 (line 102): @@ -53556,7 +51904,6 @@ -> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '\n' (6.1-7.0: ) Shifting token '\n' (6.1-7.0: ) @@ -53573,25 +51920,21 @@ -> $$ = nterm input (1.1-7.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '-' (7.1: ) Shifting token '-' (7.1: ) Entering state 2 Stack now 0 6 2 -Return for a new token: Reading a token Next token is token '-' (7.2: ) Shifting token '-' (7.2: ) Entering state 2 Stack now 0 6 2 2 -Return for a new token: Reading a token Next token is token '-' (7.3: ) Shifting token '-' (7.3: ) Entering state 2 Stack now 0 6 2 2 2 -Return for a new token: Reading a token Next token is token number (7.4: 1) Shifting token number (7.4: 1) @@ -53602,7 +51945,6 @@ -> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 -Return for a new token: Reading a token Next token is token '=' (7.6: ) Reducing stack by rule 11 (line 124): @@ -53629,13 +51971,11 @@ Shifting token '=' (7.6: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token '-' (7.8: ) Shifting token '-' (7.8: ) Entering state 2 Stack now 0 6 8 19 2 -Return for a new token: Reading a token Next token is token number (7.9: 1) Shifting token number (7.9: 1) @@ -53646,7 +51986,6 @@ -> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 -Return for a new token: Reading a token Next token is token '\n' (7.10-8.0: ) Reducing stack by rule 11 (line 124): @@ -53679,7 +52018,6 @@ -> $$ = nterm input (1.1-8.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '\n' (8.1-9.0: ) Shifting token '\n' (8.1-9.0: ) @@ -53696,7 +52034,6 @@ -> $$ = nterm input (1.1-9.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token number (9.1: 1) Shifting token number (9.1: 1) @@ -53707,13 +52044,11 @@ -> $$ = nterm exp (9.1: 1) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '-' (9.3: ) Shifting token '-' (9.3: ) Entering state 20 Stack now 0 6 8 20 -Return for a new token: Reading a token Next token is token number (9.5: 2) Shifting token number (9.5: 2) @@ -53724,7 +52059,6 @@ -> $$ = nterm exp (9.5: 2) Entering state 29 Stack now 0 6 8 20 29 -Return for a new token: Reading a token Next token is token '-' (9.7: ) Reducing stack by rule 8 (line 113): @@ -53738,7 +52072,6 @@ Shifting token '-' (9.7: ) Entering state 20 Stack now 0 6 8 20 -Return for a new token: Reading a token Next token is token number (9.9: 3) Shifting token number (9.9: 3) @@ -53749,7 +52082,6 @@ -> $$ = nterm exp (9.9: 3) Entering state 29 Stack now 0 6 8 20 29 -Return for a new token: Reading a token Next token is token '=' (9.11: ) Reducing stack by rule 8 (line 113): @@ -53763,13 +52095,11 @@ 
Shifting token '=' (9.11: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token '-' (9.13: ) Shifting token '-' (9.13: ) Entering state 2 Stack now 0 6 8 19 2 -Return for a new token: Reading a token Next token is token number (9.14: 4) Shifting token number (9.14: 4) @@ -53780,7 +52110,6 @@ -> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 19 2 10 -Return for a new token: Reading a token Next token is token '\n' (9.15-10.0: ) Reducing stack by rule 11 (line 124): @@ -53813,7 +52142,6 @@ -> $$ = nterm input (1.1-10.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token number (10.1: 1) Shifting token number (10.1: 1) @@ -53824,19 +52152,16 @@ -> $$ = nterm exp (10.1: 1) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '-' (10.3: ) Shifting token '-' (10.3: ) Entering state 20 Stack now 0 6 8 20 -Return for a new token: Reading a token Next token is token '(' (10.5: ) Shifting token '(' (10.5: ) Entering state 4 Stack now 0 6 8 20 4 -Return for a new token: Reading a token Next token is token number (10.6: 2) Shifting token number (10.6: 2) @@ -53847,13 +52172,11 @@ -> $$ = nterm exp (10.6: 2) Entering state 12 Stack now 0 6 8 20 4 12 -Return for a new token: Reading a token Next token is token '-' (10.8: ) Shifting token '-' (10.8: ) Entering state 20 Stack now 0 6 8 20 4 12 20 -Return for a new token: Reading a token Next token is token number (10.10: 3) Shifting token number (10.10: 3) @@ -53864,7 +52187,6 @@ -> $$ = nterm exp (10.10: 3) Entering state 29 Stack now 0 6 8 20 4 12 20 29 -Return for a new token: Reading a token Next token is token ')' (10.11: ) Reducing stack by rule 8 (line 113): @@ -53885,7 +52207,6 @@ -> $$ = nterm exp (10.5-11: -1) Entering state 29 Stack now 0 6 8 20 29 -Return for a new token: Reading a token Next token is token '=' (10.13: ) Reducing stack by rule 8 (line 113): @@ -53899,7 +52220,6 @@ Shifting token '=' (10.13: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token number (10.15: 2) Shifting token number (10.15: 2) @@ -53910,7 +52230,6 @@ -> $$ = nterm exp (10.15: 2) Entering state 28 Stack now 0 6 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (10.16-11.0: ) Reducing stack by rule 6 (line 102): @@ -53936,7 +52255,6 @@ -> $$ = nterm input (1.1-11.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '\n' (11.1-12.0: ) Shifting token '\n' (11.1-12.0: ) @@ -53953,7 +52271,6 @@ -> $$ = nterm input (1.1-12.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token number (12.1: 2) Shifting token number (12.1: 2) @@ -53964,13 +52281,11 @@ -> $$ = nterm exp (12.1: 2) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '^' (12.2: ) Shifting token '^' (12.2: ) Entering state 24 Stack now 0 6 8 24 -Return for a new token: Reading a token Next token is token number (12.3: 2) Shifting token number (12.3: 2) @@ -53981,13 +52296,11 @@ -> $$ = nterm exp (12.3: 2) Entering state 33 Stack now 0 6 8 24 33 -Return for a new token: Reading a token Next token is token '^' (12.4: ) Shifting token '^' (12.4: ) Entering state 24 Stack now 0 6 8 24 33 24 -Return for a new token: Reading a token Next token is token number (12.5: 3) Shifting token number (12.5: 3) @@ -53998,7 +52311,6 @@ -> $$ = nterm exp (12.5: 3) Entering state 33 Stack now 
0 6 8 24 33 24 33 -Return for a new token: Reading a token Next token is token '=' (12.7: ) Reducing stack by rule 12 (line 125): @@ -54020,7 +52332,6 @@ Shifting token '=' (12.7: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token number (12.9-11: 256) Shifting token number (12.9-11: 256) @@ -54031,7 +52342,6 @@ -> $$ = nterm exp (12.9-11: 256) Entering state 28 Stack now 0 6 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (12.12-13.0: ) Reducing stack by rule 6 (line 102): @@ -54057,13 +52367,11 @@ -> $$ = nterm input (1.1-13.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '(' (13.1: ) Shifting token '(' (13.1: ) Entering state 4 Stack now 0 6 4 -Return for a new token: Reading a token Next token is token number (13.2: 2) Shifting token number (13.2: 2) @@ -54074,13 +52382,11 @@ -> $$ = nterm exp (13.2: 2) Entering state 12 Stack now 0 6 4 12 -Return for a new token: Reading a token Next token is token '^' (13.3: ) Shifting token '^' (13.3: ) Entering state 24 Stack now 0 6 4 12 24 -Return for a new token: Reading a token Next token is token number (13.4: 2) Shifting token number (13.4: 2) @@ -54091,7 +52397,6 @@ -> $$ = nterm exp (13.4: 2) Entering state 33 Stack now 0 6 4 12 24 33 -Return for a new token: Reading a token Next token is token ')' (13.5: ) Reducing stack by rule 12 (line 125): @@ -54112,13 +52417,11 @@ -> $$ = nterm exp (13.1-5: 4) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '^' (13.6: ) Shifting token '^' (13.6: ) Entering state 24 Stack now 0 6 8 24 -Return for a new token: Reading a token Next token is token number (13.7: 3) Shifting token number (13.7: 3) @@ -54129,7 +52432,6 @@ -> $$ = nterm exp (13.7: 3) Entering state 33 Stack now 0 6 8 24 33 -Return for a new token: Reading a token Next token is token '=' (13.9: ) Reducing stack by rule 12 (line 125): @@ -54143,7 +52445,6 @@ Shifting token '=' (13.9: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token number (13.11-12: 64) Shifting token number (13.11-12: 64) @@ -54154,7 +52455,6 @@ -> $$ = nterm exp (13.11-12: 64) Entering state 28 Stack now 0 6 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (13.13-14.0: ) Reducing stack by rule 6 (line 102): @@ -54180,7 +52480,6 @@ -> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (14.1: ) @@ -54189,67 +52488,19 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | 1 2 -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1358: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) Stack now 0 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: cat stderr +./calc.at:1360: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1355: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1357: "$PERL" -pi -e 'use strict; +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -54259,10 +52510,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1357: cat stderr input: - | 1 + 2 * 3 + !* ++ -./calc.at:1358: "$PERL" -pi -e 'use strict; +input: +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -54272,11 +52522,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1355: $PREPARSER ./calc input -input: + | (1 + 1) / (1 - 1) + | 1 2 +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1360: $PREPARSER ./calc input ./calc.at:1358: cat stderr - | (* *) + (*) + (*) -./calc.at:1357: $PREPARSER ./calc input +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1355: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -54292,75 +52545,14 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering 
state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 144): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 stderr: +./calc.at:1357: cat stderr Starting parse Entering state 0 Stack now 0 @@ -54370,123 +52562,113 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 138): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token 
'*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 +Stack now 0 8 23 4 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 Reading a token Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Reducing stack by rule 10 (line 121): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 Next token is token '\n' (1.18-2.0: ) @@ -54494,7 +52676,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) + $1 = nterm exp (1.1-17: 
2) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -54512,13 +52694,13 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1//2 +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | error ./calc.at:1358: $PREPARSER ./calc input stderr: -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS Starting parse Entering state 0 Stack now 0 @@ -54528,123 +52710,113 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 138): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 +Stack now 0 8 23 4 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 Reading a token Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Reducing stack by rule 10 (line 121): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 Next token is token '\n' (1.18-2.0: ) @@ -54652,7 +52824,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) + $1 = nterm exp (1.1-17: 2) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ 
-54675,278 +52847,303 @@ Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 30 -Stack now 0 8 21 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) +Reading a token +Next token is token '+' (1.20: ) Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 144): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -Starting parse -Entering state 0 -Stack now 0 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Return for a new token: -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1357: cat stderr -./calc.at:1355: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1355: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1357: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Return for a new token: -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 -Stack now 0 4 11 26 +Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Stack now 0 8 21 4 Reading a token -1.8: syntax error: invalid character: '#' -Shifting token 
error (1.8: ) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) Stack now 0 8 21 4 -Shifting token error (1.8: ) +Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 139): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' (1.44: ) Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) 
Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -54956,14 +53153,16 @@ Entering state 6 Stack now 0 6 Reading a token -Now at end of input. +Now at en | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +d of input. Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -54978,179 +53177,325 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 30 -Stack now 0 8 21 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) Entering state 4 -Stack now 0 4 +Stack now 0 8 21 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 -Stack now 0 4 11 26 +Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Stack now 0 8 21 4 Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) Stack now 0 8 21 4 -Shifting token error (1.8: ) +Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 139): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' (1.44: ) Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -55172,370 +53517,312 @@ Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or 
'(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1357: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !- ++ -./calc.at:1357: $PREPARSER ./calc input -./calc.at:1355: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 -Stack now 0 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 21 -Stack now 0 8 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) +Reading a token +Next token is token '+' (1.20: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) +Next token is token '+' (1.30: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1357: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1355: $PREPARSER ./calc input -input: -stderr: - | 1 + 2 * 3 + !* ++ -./calc.at:1357: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 -Stack now 0 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 -Stack now 0 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -55552,174 +53839,350 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: cat stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 30 -Stack now 0 8 21 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 8 21 4 12 21 1 
Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) +Reading a token +Next token is token '+' (1.20: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 -Stack now 0 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 -Stack now 0 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -55736,6 +54199,16 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1354: cat stderr +stderr: +input: +./calc.at:1355: cat stderr +./calc.at:1358: cat stderr +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + | 1//2 +./calc.at:1360: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -55751,84 +54224,25 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) 
-Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 input: +input: +485. calc.at:1354: ok + | (!!) 
+ (1 2) = 1 +./calc.at:1355: $PREPARSER ./calc input +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -55839,36 +54253,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1355: cat stderr - | error + | 1 = 2 = 3 ./calc.at:1358: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1357: cat stderr -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 - | (#) + (#) = 2222 -./calc.at:1357: $PREPARSER ./calc input stderr: -input: - | (# + 1) = 1111 -./calc.at:1355: $PREPARSER ./calc input +stderr: +./calc.at:1357: cat stderr Starting parse Entering state 0 Stack now 0 @@ -55878,92 +54295,111 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 141): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Stack now 0 8 21 4 Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) Stack now 0 8 21 4 -Shifting token error (1.8: ) +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = 
nterm exp (1.13-16: 2222) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 -> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 @@ -55990,106 +54426,95 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Return for a new token: Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.5: 
2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: "$PERL" -pi -e 'use strict; +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -56108,92 +54533,111 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 141): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Stack now 0 8 21 4 Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) Stack now 0 8 21 4 -Shifting token error (1.8: ) +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = 
nterm exp (1.13-16: 2222) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 -> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 @@ -56220,7 +54664,22 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: "$PERL" -pi -e 'use strict; + +input: + | (!!) + (1 2) = 1 +./calc.at:1357: $PREPARSER ./calc input +./calc.at:1360: cat stderr +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -56230,7 +54689,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1358: cat stderr stderr: Starting parse Entering state 0 @@ -56241,280 +54699,121 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | 1 = 2 = 3 -./calc.at:1358: $PREPARSER ./calc input -stderr: -./calc.at:1357: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -input: -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1357: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -stderr: -./calc.at:1355: cat stderr -Starting parse -Entering state 0 -Stack now 0 +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 -Stack now 0 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Stack now 0 8 21 4 12 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 26 -Stack now 0 4 11 26 +Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: 
) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -56532,17 +54831,9 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1358: cat stderr stderr: +./calc.at:1355: cat stderr Starting parse Entering state 0 Stack now 0 @@ -56552,79 +54843,121 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -56642,8 +54975,14 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm 
input (1.1-2.0: ) input: - | (1 + # + 1) = 1111 + | (- *) + (1 2) = 1 +input: + | + | +1 +./calc.at:1358: $PREPARSER ./calc input ./calc.at:1355: $PREPARSER ./calc input +input: + | error ./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -56654,6 +54993,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1360: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -56664,95 +55004,129 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 140): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.13: ) +Shifting 
token ')' (1.13: ) Entering state 26 -Stack now 0 4 11 26 +Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -56769,14 +55143,47 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: cat stderr -./calc.at:1357: cat stderr ./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: - | - | +1 -./calc.at:1358: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1357: cat stderr +stderr: +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -56786,95 
+55193,129 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 140): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 26 -Stack now 0 4 11 26 +Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 
(line 118): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -56918,37 +55359,28 @@ Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (# + 1) = 1111 + | (- *) + (1 2) = 1 ./calc.at:1357: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -56959,76 +55391,129 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 
Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -57045,7 +55530,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: "$PERL" -pi -e 'use strict; +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -57055,8 +55541,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1355: cat stderr Starting parse Entering state 0 @@ -57067,76 +55563,129 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token 
'\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -57153,7 +55702,10 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: "$PERL" -pi -e 'use strict; +./calc.at:1358: cat stderr +./calc.at:1358: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -57163,10 +55715,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1360: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (1 + 1) / (1 - 1) + | (* *) + (*) + (*) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 ./calc.at:1355: $PREPARSER ./calc input -./calc.at:1357: "$PERL" -pi -e 'use strict; +492. calc.at:1364: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %defines api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +input: +./calc.at:1364: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -57176,8 +55757,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1358: cat stderr -stderr: +./calc.at:1357: cat stderr stderr: Starting parse Entering state 0 @@ -57188,113 +55768,123 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 138): +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 121): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 Next token is token '\n' (1.18-2.0: ) @@ -57302,7 +55892,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) + $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -57320,10 +55910,16 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stdout: +./calc.at:1364: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated 
-o calc.c calc.y ./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -./calc.at:1358: $PREPARSER ./calc /dev/null +input: + | (* *) + (*) + (*) +./calc.at:1358: cat stderr +./calc.at:1357: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1360: $PREPARSER ./calc input +stderr: +input: stderr: Starting parse Entering state 0 @@ -57334,113 +55930,123 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 138): +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 121): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 Next token is token '\n' (1.18-2.0: ) @@ -57448,7 +56054,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) + $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -57466,25 +56072,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1357: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1357: $PREPARSER ./calc input -451. types.at:139: ok -stderr: -stderr: +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -57494,95 +56082,132 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-8: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -57599,106 +56224,184 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1358: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' 
(1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Stack now 0 4 -Shifting token error (1.2-6: ) +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-8: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Stack now 0 4 11 26 +Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -57715,195 +56418,376 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1355: cat stderr -./calc.at:1357: cat stderr -./calc.at:1358: cat stderr -486. calc.at:1355: ok -stdout: -./calc.at:1360: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1360: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -input: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1358: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) -./calc.at:1357: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 4 12 21 +Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 4 12 21 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Return for a new token: +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Return for a new token: +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 30 -Stack now 0 4 12 21 30 +Stack now 0 8 21 4 12 21 30 +Return for a new token: Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Return for a new token: +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Return for a new token: +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Return for a new token: +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 8 21 4 +Return for a new token: Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Stack now 0 8 23 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 -Stack now 0 8 23 4 12 +Stack now 0 8 21 4 12 +Return for a new token: Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 
-Stack now 0 8 23 4 12 20 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Return for a new token: Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 8 21 4 12 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Return for a new token: Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Return for a new token: Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -57912,6 +56796,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -57920,22 +56805,29 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1355: cat stderr Starting parse Entering state 0 Stack now 0 @@ -58281,8 +57173,397 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: cat stderr +input: +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1355: $PREPARSER ./calc input +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: cat stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1357: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 142): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1358: cat stderr +stderr: + | + | +1 ./calc.at:1360: $PREPARSER ./calc input +input: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: + | (!!) 
+ (1 2) = 1 +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1357: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 142): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1355: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -58292,114 +57573,876 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Next token is token '!' (1.2: ) +Shifting token '!' 
(1.2: ) +Entering state 5 +Stack now 0 4 5 +Return for a new token: Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 -Stack now 0 4 12 21 +Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 4 12 21 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Return for a new token: +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 -Stack now 0 4 12 21 30 +Stack now 0 8 21 30 +Return for a new token: Reading a token -Next token is token ')' (1.7: ) +Next token is token '=' (1.14: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) 
+Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +stderr: +stdout: +input: + | 1 + 2 * 3 + !- ++ +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +stderr: +./calc.at:1355: $PREPARSER ./calc input +./types.at:139: $PREPARSER ./test +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Return for a new token: +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 8 21 4 +Return for a new token: Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 8 23 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 -Stack now 0 8 23 4 12 +Stack now 0 8 21 4 12 +Return for a new token: Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.16: 1) Shifting token number (1.16: 1) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): $1 = token number (1.16: 1) -> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting 
token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | 1 + 2 * 3 + !- ++ +./calc.at:1357: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 143): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 143): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1360: cat stderr +======== Testing with C++ standard flags: '' +./calc.at:1358: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1360: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +input: +./calc.at:1357: cat stderr +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1355: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +stderr: +stderr: +input: + | 1 + 2 * 3 + !* ++ +stdout: +./calc.at:1357: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Return for a new token: +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 +Stack now 0 8 21 4 12 +Return for a new token: +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for 
a new token: +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 Next token is token '\n' (1.18-2.0: ) @@ -58407,7 +58450,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) + $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -58417,6 +58460,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -58426,8 +58470,94 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -491. calc.at:1363: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
+./calc.at:1362: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +input: stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + | 1 + 2 * 3 + !* ++ +./calc.at:1355: $PREPARSER ./calc input +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -58438,6 +58568,491 @@ Stack now 0 4 Return for a new token: Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Return for a new token: +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Return for a new token: +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now 
at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1364: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1362: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 144): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1360: cat stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +stderr: +./calc.at:1362: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 144): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: cat stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1358: cat stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: $PREPARSER ./calc input +input: +./calc.at:1355: cat stderr + | (#) + (#) = 2222 +./calc.at:1357: $PREPARSER ./calc input +stderr: +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token Next token is token ')' (1.2: ) 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' Shifting token error (1.2: ) @@ -58454,19 +59069,16 @@ -> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '(' (1.6: ) Shifting token '(' (1.6: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: Reading a token Next token is token number (1.7: 1) Shifting token number (1.7: 1) @@ -58477,13 +59089,11 @@ -> $$ = nterm exp (1.7: 1) Entering state 12 Stack now 0 8 21 4 12 -Return for a new token: Reading a token Next token is token '+' (1.9: ) Shifting token '+' (1.9: ) Entering state 21 Stack now 0 8 21 4 12 21 -Return for a new token: Reading a token Next token is token number (1.11: 1) Shifting token number (1.11: 1) @@ -58494,7 +59104,6 @@ -> $$ = nterm exp (1.11: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 -Return for a new token: Reading a token Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 112): @@ -58508,7 +59117,6 @@ Shifting token '+' (1.13: ) Entering state 21 Stack now 0 8 21 4 12 21 -Return for a new token: Reading a token Next token is token number (1.15: 1) Shifting token number (1.15: 1) @@ -58519,7 +59127,6 @@ -> $$ = nterm exp (1.15: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 -Return for a new token: Reading a token Next token is token '+' (1.17: ) Reducing stack by rule 7 (line 112): @@ -58533,7 +59140,6 @@ Shifting token '+' (1.17: ) Entering state 21 Stack now 0 8 21 4 12 21 -Return for a new token: Reading a token Next token is token ')' (1.18: ) 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' @@ -58555,7 +59161,6 @@ -> $$ = nterm exp (1.6-18: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '+' (1.20: ) Reducing stack by rule 7 (line 112): @@ -58569,13 +59174,11 @@ Shifting token '+' (1.20: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '(' (1.22: ) Shifting token '(' (1.22: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: Reading a token Next token is token '*' (1.23: ) 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
@@ -58589,7 +59192,6 @@ Shifting token error (1.23: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token Next token is token '*' (1.25: ) Error: discarding token '*' (1.25: ) @@ -58598,7 +59200,6 @@ Shifting token error (1.23-25: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token Next token is token '*' (1.27: ) Error: discarding token '*' (1.27: ) @@ -58607,7 +59208,6 @@ Shifting token error (1.23-27: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token Next token is token ')' (1.28: ) Shifting token ')' (1.28: ) @@ -58620,7 +59220,6 @@ -> $$ = nterm exp (1.22-28: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '+' (1.30: ) Reducing stack by rule 7 (line 112): @@ -58634,13 +59233,11 @@ Shifting token '+' (1.30: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '(' (1.32: ) Shifting token '(' (1.32: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: Reading a token Next token is token number (1.33: 1) Shifting token number (1.33: 1) @@ -58651,13 +59248,11 @@ -> $$ = nterm exp (1.33: 1) Entering state 12 Stack now 0 8 21 4 12 -Return for a new token: Reading a token Next token is token '*' (1.35: ) Shifting token '*' (1.35: ) Entering state 22 Stack now 0 8 21 4 12 22 -Return for a new token: Reading a token Next token is token number (1.37: 2) Shifting token number (1.37: 2) @@ -58668,7 +59263,6 @@ -> $$ = nterm exp (1.37: 2) Entering state 31 Stack now 0 8 21 4 12 22 31 -Return for a new token: Reading a token Next token is token '*' (1.39: ) Reducing stack by rule 9 (line 114): @@ -58682,7 +59276,6 @@ Shifting token '*' (1.39: ) Entering state 22 Stack now 0 8 21 4 12 22 -Return for a new token: Reading a token Next token is token '*' (1.41: ) 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' @@ -58700,7 +59293,6 @@ Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token Next token is token ')' (1.42: ) Shifting token ')' (1.42: ) @@ -58713,7 +59305,6 @@ -> $$ = nterm exp (1.32-42: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '=' (1.44: ) Reducing stack by rule 7 (line 112): @@ -58727,7 +59318,6 @@ Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token Next token is token number (1.46: 1) Shifting token number (1.46: 1) @@ -58738,7 +59328,6 @@ -> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): @@ -58764,7 +59353,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -58773,17 +59361,9 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: + | (* *) + (*) + (*) +./calc.at:1358: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -59801,26 +60381,741 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1363: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (#) + (#) = 2222 +./calc.at:1355: $PREPARSER ./calc input ./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1357: cat stderr +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + 
$1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 @@ -60839,38 +62134,9 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1358: cat stderr -./calc.at:1360: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -487. calc.at:1357: ok -input: -input: - | (!!) + (1 2) = 1 -./calc.at:1358: $PREPARSER ./calc input - | 1 2 -./calc.at:1360: $PREPARSER ./calc input -stderr: +./calc.at:1362: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -60881,121 +62147,255 @@ Stack now 0 4 Return for a new token: Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Return for a new token: Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Return for a new token: Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Return for a new token: Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 Return for a new token: Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 Return for a new token: Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 Return for a new token: Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Return for a new token: +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Shifting token error (1.16: ) Entering state 11 Stack now 0 8 21 4 11 Return for a new token: Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 30 Stack now 0 8 21 30 Return for a new token: Reading a token -Next token is token '=' (1.14: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) -> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 @@ -61014,7 +62414,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -61023,20 +62422,174 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -492. 
calc.at:1364: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %defines api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1364: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -./calc.at:1364: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = 
nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: cat stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 2 +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1357: cat stderr +input: stderr: Starting parse Entering state 0 @@ -61058,6 +62611,17 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 + | (!!) + (1 2) = 1 +./calc.at:1358: cat stderr +./calc.at:1360: $PREPARSER ./calc input +./calc.at:1355: cat stderr +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +stderr: +stdout: + | (1 + #) = 1111 +./calc.at:1357: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -61066,13 +62630,11 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token Next token is token '!' (1.2: ) Shifting token '!' (1.2: ) Entering state 5 Stack now 0 4 5 -Return for a new token: Reading a token Next token is token '!' (1.3: ) Shifting token '!' 
(1.3: ) @@ -61085,7 +62647,6 @@ Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token ')' (1.4: ) Shifting token ')' (1.4: ) @@ -61098,19 +62659,16 @@ -> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '+' (1.6: ) Shifting token '+' (1.6: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '(' (1.8: ) Shifting token '(' (1.8: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: Reading a token Next token is token number (1.9: 1) Shifting token number (1.9: 1) @@ -61121,7 +62679,6 @@ -> $$ = nterm exp (1.9: 1) Entering state 12 Stack now 0 8 21 4 12 -Return for a new token: Reading a token Next token is token number (1.11: 2) 1.11: syntax error, unexpected number @@ -61137,7 +62694,6 @@ Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token Next token is token ')' (1.12: ) Shifting token ')' (1.12: ) @@ -61150,7 +62706,6 @@ -> $$ = nterm exp (1.8-12: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '=' (1.14: ) Reducing stack by rule 7 (line 112): @@ -61164,7 +62719,6 @@ Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token Next token is token number (1.16: 1) Shifting token number (1.16: 1) @@ -61175,7 +62729,6 @@ -> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): @@ -61201,7 +62754,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -61210,31 +62762,35 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1358: cat stderr -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stderr: +./types.at:139: ./check +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 input: - | (- *) + (1 2) = 1 +input: +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + | 1 + 2 * 3 + !+ ++ ./calc.at:1358: $PREPARSER ./calc input -./calc.at:1360: cat stderr +stderr: stderr: Starting parse Entering state 0 @@ -61244,141 +62800,80 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Return for a new token: +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering 
state 12 -Stack now 0 8 21 4 12 -Return for a new token: -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -61387,7 +62882,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -61396,8 +62890,9 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: + | (1 + #) = 1111 +./calc.at:1355: $PREPARSER ./calc input +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -61406,141 +62901,122 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Return for a new token: +Next token is token '!' (1.2: ) +Shifting token '!' 
(1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 Stack now 0 8 21 4 12 -Return for a new token: Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) Stack now 0 8 21 4 -Shifting token error (1.10-12: ) +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) Stack now 0 8 21 4 -Shifting token error (1.10-12: ) +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token -Next token is token '=' (1.15: ) +Next token is token '=' (1.14: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = 
token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -61549,7 +63025,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -61558,23 +63033,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -493. calc.at:1367: testing Calculator parse.error=custom ... -./calc.at:1367: mv calc.y.tmp calc.y - -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1367: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -input: - | 1//2 -./calc.at:1360: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 @@ -61589,54 +63048,90 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1358: cat stderr -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: - | (* *) + (*) + (*) -./calc.at:1358: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Return for a new token: +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Return for a new token: +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Return for a new token: Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -stderr: +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -61645,145 +63140,80 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -61792,7 +63222,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -61802,16 +63231,7 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -61821,145 +63241,80 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -61968,7 +63323,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -61977,45 +63331,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: cat stderr -input: -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | error -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1358: cat stderr -./calc.at:1363: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1364: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1358: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 stderr: Starting parse Entering state 0 @@ -62104,8 +63419,7 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -62116,104 +63430,135 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./calc.at:1362: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 8 21 -Return for a new token: +Stack now 0 4 12 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Return for a new token: +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 Reading a token -Next token 
is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Return for a new token: + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Return for a new token: +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1360: cat stderr -./calc.at:1367: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS input: +input: + | 1//2 +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 | 1 + 2 * 3 + !- ++ ./calc.at:1358: $PREPARSER ./calc input -input: - | 1 = 2 = 3 -./calc.at:1360: $PREPARSER ./calc input -stderr: stderr: +input: +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -62228,32 +63573,22 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (- *) + (1 2) = 1 +./calc.at:1360: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -62342,160 +63677,160 @@ Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) ./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: cat stderr +stderr: +./calc.at:1357: cat stderr stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Return for a new token: +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Return for a new token: + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Return for a new token: -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -Starting parse -Entering state 0 -Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1358: cat stderr -input: -./calc.at:1360: cat stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1358: $PREPARSER ./calc input -stderr: +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Stack now 0 @@ -62573,23 +63908,16 @@ Stack now 0 8 21 5 Return for a new token: Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '-' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | - | +1 -./calc.at:1360: $PREPARSER ./calc input -stderr: Starting parse Entering state 0 Stack now 0 @@ -62603,108 +63931,24 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Return for a new token: Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Return for a new token: +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Return for a new token: -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) +Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 ./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (# + 1) = 1111 +./calc.at:1355: $PREPARSER ./calc input ./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -62716,47 +63960,6 @@ }eg ' expout || exit 77 stderr: -./calc.at:1358: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (#) + (#) = 2222 -./calc.at:1358: $PREPARSER ./calc input -stderr: -./calc.at:1360: cat stderr Starting parse Entering state 0 Stack now 0 @@ -62765,112 +63968,130 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: -1.2: syntax error: invalid character: '#' Reading a token -Shifting token error (1.2: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: -1.8: syntax error: invalid character: '#' Reading a token -Shifting token error (1.8: ) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) Stack now 0 8 21 4 -Shifting token error (1.8: ) +Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' (1.15: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is 
token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -62879,7 +64100,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -62888,9 +64108,22 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: $PREPARSER ./calc /dev/null +input: +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (# + 1) = 1111 +./calc.at:1357: $PREPARSER ./calc input stderr: +stderr: +./calc.at:1358: cat stderr Starting parse Entering state 0 Stack now 0 @@ -62899,9 +64132,8 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: -1.2: syntax error: invalid character: '#' Reading a token +1.2: syntax error: invalid character: '#' Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 @@ -62912,99 +64144,161 @@ Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -1.8: syntax error: invalid character: '#' +Stack now 0 4 Reading a token -Shifting token error (1.8: ) +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Stack now 0 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 -Stack now 0 8 21 4 11 26 +Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 
25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -63013,7 +64307,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -63022,36 +64315,9 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1358: cat stderr +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: cat stderr +input: ./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -63062,11 +64328,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (1 + #) = 1111 -./calc.at:1358: $PREPARSER ./calc input stderr: -./calc.at:1360: cat stderr +input: + | 1 + 2 * 3 + !* ++ Starting parse Entering state 0 Stack now 0 @@ -63075,41 +64339,34 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Return for a new token: +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Return for a new token: -1.6: syntax error: invalid character: '#' -Reading a token -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) @@ -63122,13 +64379,11 @@ -> $$ = nterm exp (1.1-7: 1111) 
Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '=' (1.9: ) Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token Next token is token number (1.11-14: 1111) Shifting token number (1.11-14: 1111) @@ -63139,7 +64394,6 @@ -> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): @@ -63164,7 +64418,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -63173,9 +64426,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: Starting parse Entering state 0 Stack now 0 @@ -63184,60 +64435,51 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Return for a new token: +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Return for a new token: -1.6: syntax error: invalid character: '#' -Reading a token -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) -> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '=' (1.9: ) Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token Next token is token number (1.11-14: 1111) Shifting token number (1.11-14: 1111) @@ -63248,7 +64490,6 @@ -> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): @@ -63273,7 +64514,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (2.1: ) @@ -63282,339 +64522,235 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1360: $PREPARSER ./calc input + | error +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1360: cat stderr +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 -Stack now 0 8 21 4 12 21 30 +Stack now 0 8 21 30 +Return for a new token: Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Return for a new token: Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): - $1 
= nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Return for a new token: Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +./calc.at:1360: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1357: cat stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Return for a new token: +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 -Stack now 0 8 21 4 12 22 +Stack now 0 8 21 30 22 +Return for a new token: Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 21 4 12 22 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 31 -Stack now 0 8 21 4 12 22 31 +Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token -Next token is token '*' (1.39: ) +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax 
error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Return for a new token: Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 +Stack now 0 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1355: cat stderr stderr: -./calc.at:1358: cat stderr Starting parse Entering state 0 Stack now 0 @@ -63624,298 +64760,132 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token 
'+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 21 4 -Shifting token error (1.23-27: ) +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.30: ) +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) Stack now 0 8 21 4 -Shifting token error (1.33-41: ) +Shifting token error (1.16: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.44: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -63933,9 +64903,19 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) input: - | (# + 1) = 1111 -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1360: "$PERL" -pi -e 'use strict; + | (1 + # + 1) = 1111 +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -63945,6 +64925,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1357: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 @@ -63954,84 +64936,96 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: -1.2: syntax error: invalid character: '#' Reading a token -Shifting token error (1.2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-8: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-10: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 
-Return for a new token: Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -64040,7 +65034,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -64049,9 +65042,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1360: cat stderr +./calc.at:1358: cat stderr +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -64060,382 +65052,133 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: -1.2: syntax error: invalid character: '#' Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp 
(1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | (!!) + (1 2) = 1 -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.14: ) +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: cat stderr -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Shifting token error (1.16: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.14: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 
Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -64452,10 +65195,11 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1362: cat stderr input: - | (1 + # + 1) = 1111 -./calc.at:1358: $PREPARSER ./calc input stderr: + | (1 + # + 1) = 1111 +./calc.at:1355: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -64464,7 +65208,6 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token Next token is token number (1.2: 1) Shifting token number (1.2: 1) @@ -64475,15 +65218,13 @@ -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 -Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 4 12 21 -Return for a new token: -1.6: syntax error: invalid character: '#' Reading a token +1.6: syntax error: invalid character: '#' Error: popping token '+' (1.4: ) Stack now 0 4 12 Error: popping nterm exp (1.2: 1) @@ -64498,7 +65239,6 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token '+' (1.8: ) Error: discarding token '+' (1.8: ) @@ -64507,7 +65247,6 @@ Shifting token error (1.2-8: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token number (1.10: 1) Error: discarding token number (1.10: 1) @@ -64516,7 +65255,6 @@ Shifting token error (1.2-10: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) @@ -64529,13 +65267,11 @@ -> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '=' (1.13: ) Shifting token '=' (1.13: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token Next token is token number (1.15-18: 1111) Shifting token number (1.15-18: 1111) @@ -64546,7 +65282,6 @@ -> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 102): @@ -64571,7 +65306,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -64580,7 +65314,22 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + | (#) + (#) = 2222 +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1358: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1362: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -64590,7 +65339,6 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token Next token is token number (1.2: 1) Shifting token number (1.2: 1) @@ -64601,15 +65349,13 @@ -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 -Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 4 12 21 -Return for a new token: -1.6: syntax error: invalid character: '#' Reading a token +1.6: syntax error: invalid character: '#' Error: popping token '+' (1.4: ) Stack now 0 4 12 Error: popping nterm exp (1.2: 1) @@ -64624,7 +65370,6 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token '+' (1.8: ) Error: discarding token '+' (1.8: ) @@ -64633,7 +65378,6 @@ Shifting token error (1.2-8: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token number (1.10: 1) Error: discarding token number (1.10: 1) @@ -64642,26 +65386,23 @@ Shifting token error (1.2-10: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) -> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '=' (1.13: ) Shifting token '=' (1.13: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token Next token is token number (1.15-18: 1111) Shifting token number (1.15-18: 1111) @@ -64672,7 +65413,6 @@ -> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 102): @@ -64697,7 +65437,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (2.1: ) @@ -64706,7 +65445,49 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: "$PERL" -pi -e 'use strict; +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +stderr: +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -64717,21 +65498,6 @@ }eg ' expout || exit 77 ./calc.at:1360: cat stderr -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (- *) + (1 2) = 1 -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1358: cat stderr -stderr: Starting parse Entering state 0 Stack now 0 @@ -64740,130 +65506,112 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: +1.2: syntax error: invalid character: '#' Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: +1.8: syntax error: invalid character: '#' Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) +Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) Stack now 0 8 21 4 -Shifting token error (1.10-12: ) +Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token -Next token is token '=' (1.15: ) +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 +Return for a new token: 
Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -64872,6 +65620,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -64880,10 +65629,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | (1 + 1) / (1 - 1) -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: $PREPARSER ./calc input +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: Starting parse @@ -64894,130 +65641,112 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: +1.2: syntax error: invalid character: '#' Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: +1.8: syntax error: invalid character: '#' Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) +Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) Stack now 0 8 21 4 -Shifting token error (1.10-12: ) +Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token -Next token is token '=' (1.15: ) +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 +Return for a new token: 
Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -65026,6 +65755,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -65034,6 +65764,283 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 
+Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1357: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 + | 1 + 2 * 3 + !+ ++ +./calc.at:1360: $PREPARSER ./calc input +input: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) 
+-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + 1) / (1 - 1) +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: $PREPARSER ./calc input +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1358: cat stderr +./calc.at:1362: cat stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -65042,7 +66049,6 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token Next token is token number (1.2: 1) Shifting token number (1.2: 1) @@ -65053,13 +66059,11 @@ -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 -Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 4 12 21 -Return for a new token: Reading a token Next token is token number (1.6: 1) Shifting token number (1.6: 1) @@ -65070,7 +66074,6 @@ -> $$ = nterm exp (1.6: 1) Entering state 30 Stack now 0 4 12 21 30 -Return for a new token: Reading a token Next token is token ')' (1.7: ) Reducing stack by rule 7 (line 112): @@ -65091,19 +66094,16 @@ -> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '/' (1.9: ) Shifting token '/' (1.9: ) Entering state 23 Stack now 0 8 23 -Return for a new token: Reading a token Next token is token '(' (1.11: ) Shifting token '(' (1.11: ) Entering state 4 Stack now 0 8 23 4 -Return for a new token: Reading a token Next token is token number (1.12: 1) Shifting token number (1.12: 1) @@ -65114,13 +66114,11 @@ -> $$ = nterm exp (1.12: 1) Entering state 12 Stack now 0 8 23 4 12 -Return for a new token: Reading a token Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) Entering state 20 Stack now 0 8 23 4 12 20 -Return for a new token: Reading a token Next token is token number (1.16: 1) Shifting token number (1.16: 1) @@ -65131,7 +66129,6 @@ -> $$ = nterm exp (1.16: 1) Entering state 29 Stack now 0 8 23 4 12 20 29 -Return for a new token: Reading a token Next token is token ')' (1.17: ) Reducing stack by 
rule 8 (line 113): @@ -65152,7 +66149,6 @@ -> $$ = nterm exp (1.11-17: 0) Entering state 32 Stack now 0 8 23 32 -Return for a new token: Reading a token Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 10 (line 115): @@ -65178,7 +66174,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -65187,8 +66182,92 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1360: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1355: cat stderr stderr: +input: +input: Starting parse Entering state 0 Stack now 0 @@ -65197,7 +66276,6 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token Next token is token number (1.2: 1) Shifting token number (1.2: 1) @@ -65208,13 +66286,11 @@ -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 -Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 4 12 21 -Return for a new token: Reading a token Next token is token number (1.6: 1) Shifting token number (1.6: 1) @@ -65225,7 +66301,6 @@ -> $$ = nterm exp (1.6: 1) Entering state 30 Stack now 0 4 12 21 30 -Return for a new token: Reading a token Next token is token ')' (1.7: ) Reducing stack by rule 7 (line 112): @@ -65246,19 +66321,16 @@ -> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '/' (1.9: ) Shifting token '/' (1.9: ) Entering state 23 Stack now 0 8 23 -Return for a new token: Reading a token Next token is token '(' (1.11: ) Shifting token '(' (1.11: ) Entering state 4 Stack now 0 8 23 4 -Return for a new token: Reading a token Next token is token number (1.12: 1) Shifting token number (1.12: 1) @@ -65269,13 +66341,11 @@ -> $$ = nterm exp (1.12: 1) Entering state 12 Stack now 0 8 23 4 12 -Return for a new token: Reading a token Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) Entering state 20 Stack now 0 8 23 4 12 20 -Return for a new token: Reading a token Next token is token number (1.16: 1) Shifting token number (1.16: 1) @@ -65286,7 +66356,6 @@ -> $$ = nterm exp (1.16: 1) Entering state 29 Stack now 0 8 23 4 12 20 29 -Return for a new token: Reading a token Next token is token ')' (1.17: ) Reducing stack by rule 8 (line 113): @@ -65307,7 +66376,6 @@ -> $$ = nterm exp (1.11-17: 0) Entering state 32 Stack now 0 8 23 32 -Return for a new token: Reading a token Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 10 (line 115): @@ -65333,7 +66401,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -65342,171 +66409,26 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1360: cat stderr -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | + | +1 +./calc.at:1362: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1358: $PREPARSER ./calc input input: - | (* *) + (*) + (*) -./calc.at:1358: cat stderr -./calc.at:1360: $PREPARSER ./calc input -stderr: stderr: -stdout: -488. 
calc.at:1358: ok -./calc.at:1362: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1360: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -65516,14 +66438,12 @@ Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 stderr: Starting parse Entering state 0 @@ -65533,133 +66453,87 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Return for a new token: +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Return for a new token: +1.6: syntax error: invalid character: '#' +Reading a token +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 +Return for a new token: Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -65668,6 +66542,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -65676,48 +66551,9 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - -./calc.at:1362: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1360: cat stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -input: -./calc.at:1362: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1360: $PREPARSER ./calc input +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -65788,190 +66624,147 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) + $2 = token '-' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + 1) / (1 - 1) +./calc.at:1355: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Return for a new token: Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 8 21 +Stack now 0 4 12 21 +Return for a new token: +1.6: syntax error: invalid character: '#' Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token 
error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Return for a new token: Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1360: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -65981,1104 +66774,140 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1357: cat stderr +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 21 -Stack now 0 6 8 21 +Next token is token '(' (1.1: ) +Shifting 
token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 6 8 21 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Stack now 0 6 8 21 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Stack now 0 6 8 21 30 22 2 1 +Stack now 0 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) 
-Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 -Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by input: -rule 4 (line 97): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 -Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 +Stack now 0 4 12 21 30 Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.2: 1) + $2 = 
token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering 
state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 
19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 -Stack now 0 6 8 20 4 +Stack now 0 8 23 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 6 8 20 4 1 +Stack now 0 8 23 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 -Stack now 0 6 8 20 4 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) Entering state 20 -Stack now 0 6 8 20 4 12 20 +Stack now 0 8 23 4 12 20 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 6 8 20 4 12 20 1 +Stack now 0 8 23 4 12 20 1 Reducing stack by rule 5 (line 101): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 
0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 -Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is 
token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 -Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 +Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 19 1 
-Reducing stack by rule 5 (line 101): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (14.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) - | 1 + 2 * 3 + !- ++ -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -494. calc.at:1368: testing Calculator parse.error=custom %locations api.prefix={calc} ... 
-./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: mv calc.y.tmp calc.y - -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 121): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -67088,926 +66917,14 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 21 -Stack now 0 6 8 21 -Reading a token -Next token is token number 
(2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Stack now 0 6 8 21 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 -Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.2: 1) --> $$ = nterm exp 
(4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 -Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 -Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.6: 2) --> $$ = nterm exp 
(5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 124): - $1 = 
token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> 
$$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Stack now 0 6 8 20 4 -Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) -Entering state 1 -Stack now 0 6 8 20 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Stack now 0 6 8 20 4 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 -Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 
-Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 -Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 
12 24 -Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 125): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Stack now 0 6 -Reading a token Now at end of input. 
-Shifting token end of file (14.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1362: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -68088,10 +67005,7 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -input: - | 1 2 -./calc.at:1360: "$PERL" -pi -e 'use strict; +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -68101,136 +67015,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1362: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1360: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) 
-Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) ./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -68241,404 +67025,133 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: cat stderr stderr: +487. calc.at:1357: ok Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: - | 1//2 -./calc.at:1362: $PREPARSER ./calc input -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 +Stack now 0 4 12 21 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Stack now 0 1 +Stack now 0 4 12 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) Entering state 23 Stack now 0 8 23 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -input: - | (#) + (#) = 2222 -./calc.at:1360: $PREPARSER ./calc input -stderr: -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 +Stack now 0 8 23 4 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 23 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 23 4 12 20 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack 
now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 121): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -68655,9 +67168,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | error -./calc.at:1362: $PREPARSER ./calc input ./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -68668,29 +67178,48 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +./calc.at:1358: cat stderr +./calc.at:1362: cat stderr +./calc.at:1362: $PREPARSER ./calc /dev/null ./calc.at:1360: cat stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) Stack now 0 input: + | (# + 1) = 1111 +./calc.at:1358: $PREPARSER ./calc input ./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1360: $PREPARSER ./calc input +input: +./calc.at:1355: cat stderr stderr: +stderr: + | 1 + 2 * 3 + !* ++ +./calc.at:1360: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Now at end of input. 
+1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) Stack now 0 + stderr: Starting parse Entering state 0 @@ -68700,147 +67229,38 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: +1.2: syntax error: invalid character: '#' Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Return for a new token: Reading a token Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Return for a new token: +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) @@ -68853,11 +67273,13 @@ -> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '=' (1.9: ) Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.11-14: 1111) Shifting token number (1.11-14: 1111) @@ -68868,6 +67290,7 @@ -> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): @@ -68892,6 +67315,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -68900,24 +67324,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1362: cat stderr -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1360: cat stderr - | 1 = 2 = 3 -./calc.at:1362: $PREPARSER ./calc input +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: Starting parse Entering state 0 Stack now 0 @@ -68932,35 +67340,73 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 - | (# + 1) = 1111 -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) Starting parse Entering state 0 Stack now 0 @@ -68969,8 +67415,9 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Reading a token +Return for a new token: 1.2: syntax error: invalid character: '#' +Reading a token Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 @@ -68981,6 +67428,7 @@ Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token '+' (1.4: ) Error: discarding token '+' (1.4: ) @@ -68989,6 +67437,7 @@ Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token number (1.6: 1) Error: discarding token number (1.6: 1) @@ -68997,6 +67446,7 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) @@ -69009,11 +67459,13 @@ -> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '=' (1.9: ) Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.11-14: 1111) Shifting token number (1.11-14: 1111) @@ -69024,6 +67476,7 @@ -> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): @@ -69048,6 +67501,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -69056,7 +67510,22 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +486. calc.at:1355: ok +stderr: +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: stderr: +./types.at:139: $PREPARSER ./test +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -69071,132 +67540,74 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./calc.at:1367: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> 
$$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1362: "$PERL" -pi -e 'use strict; +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -69206,17 +67617,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1367: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c +stderr: +./calc.at:1362: cat stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: cat stderr +======== Testing with C++ standard flags: '' ./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -69227,62 +67633,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1362: cat stderr input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1367: $PREPARSER ./calc input -stderr: -./calc.at:1360: cat stderr -input: -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1362: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -stderr: input: | (1 + # + 1) = 1111 -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1360: cat stderr stderr: Starting parse Entering state 0 @@ -69292,6 +67650,7 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token Next token is token number (1.2: 1) Shifting token number (1.2: 1) @@ -69302,154 +67661,15 @@ -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 +Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 4 12 21 -Reading a token +Return for a new token: 1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) 
--> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 Reading a token -1.6: syntax error: invalid character: '#' Error: popping token '+' (1.4: ) Stack now 0 4 12 Error: popping nterm exp (1.2: 1) @@ -69464,6 +67684,7 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token '+' (1.8: ) Error: discarding token '+' (1.8: ) @@ -69472,6 +67693,7 @@ Shifting token error (1.2-8: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token number (1.10: 1) Error: discarding token number (1.10: 1) @@ -69480,6 +67702,7 @@ Shifting token error (1.2-10: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) @@ -69492,11 +67715,13 @@ -> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '=' (1.13: ) Shifting token '=' (1.13: ) Entering state 19 
Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.15-18: 1111) Shifting token number (1.15-18: 1111) @@ -69507,6 +67732,7 @@ -> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 102): @@ -69531,6 +67757,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -69539,376 +67766,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1362: cat stderr -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 2 -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1362: $PREPARSER ./calc /dev/null -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -stderr: -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1360: cat stderr -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 - | (1 + 1) / (1 - 1) -./calc.at:1360: $PREPARSER ./calc input -stderr: -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) 
-Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 
-Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1362: cat stderr -./calc.at:1367: cat stderr -input: -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1362: $PREPARSER ./calc input -input: - | 1//2 -./calc.at:1367: $PREPARSER ./calc input -stderr: -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -70227,9 +68085,268 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Return for a new token: +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Return for a new token: +1.6: syntax error: invalid character: '#' +Reading a token +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: 
) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + | (#) + (#) = 2222 +./calc.at:1360: $PREPARSER ./calc input ./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Stack now 0 @@ -70547,15 +68664,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1367: cat stderr -./calc.at:1360: cat stderr -input: - | error -./calc.at:1367: $PREPARSER ./calc input -489. calc.at:1360: ok -stderr: -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: cat stderr ./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -70566,14 +68676,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -stderr: -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1362: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1362: $PREPARSER ./calc input -./calc.at:1367: cat stderr +493. calc.at:1367: testing Calculator parse.error=custom ... stderr: Starting parse Entering state 0 @@ -70584,10 +68687,466 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) 
+Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1367: mv calc.y.tmp calc.y + +input: +./calc.at:1362: cat stderr +./calc.at:1367: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + | (1 + 1) / (1 - 1) +./calc.at:1358: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Return for a new token: +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Return for a new token: +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Return for a new token: +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Return for a new token: +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Return for a new token: +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Return for a new token: +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Return for a new token: +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 
= token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Return for a new token: +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Return for a new token: +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +494. calc.at:1368: testing Calculator parse.error=custom %locations api.prefix={calc} ... +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1368: mv calc.y.tmp calc.y + +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Return for a new token: +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Return for a new token: +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Return for a new token: +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Return for a new token: +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Return for a new token: +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Return for a new token: +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Return for a new token: +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Return for a new token: +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Return for a new token: +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 
+Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | (!!) + (1 2) = 1 +./calc.at:1360: cat stderr +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token Next token is token '!' (1.3: ) Shifting token '!' (1.3: ) @@ -70715,10 +69274,20 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: - | 1 = 2 = 3 -./calc.at:1367: $PREPARSER ./calc input +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./calc.at:1360: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -70860,11 +69429,107 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1358: cat stderr stderr: -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm 
exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -70875,13 +69540,126 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +488. calc.at:1358: ok +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 
8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1362: cat stderr input: +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 | (- *) + (1 2) = 1 ./calc.at:1362: $PREPARSER ./calc input -./calc.at:1367: cat stderr + +stderr: +stdout: +./types.at:139: ./check +./calc.at:1360: cat stderr stderr: -495. calc.at:1369: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} ... Starting parse Entering state 0 Stack now 0 @@ -71030,14 +69808,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1369: mv calc.y.tmp calc.y - -input: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y - | - | +1 -./calc.at:1367: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -71187,11 +69959,10 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +input: + | (# + 1) = 1111 +./calc.at:1360: $PREPARSER ./calc input stderr: -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) ./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -71202,19 +69973,217 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 
11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS ./calc.at:1362: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' 
(1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) input: -./calc.at:1367: cat stderr | (* *) + (*) + (*) ./calc.at:1362: $PREPARSER ./calc input -./calc.at:1367: $PREPARSER ./calc /dev/null -stderr: -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +495. calc.at:1369: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} ... stderr: -./calc.at:1367: cat stderr +./calc.at:1369: mv calc.y.tmp calc.y + +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -71366,9 +70335,10 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS ./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: +./calc.at:1369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1360: cat stderr stderr: Starting parse Entering state 0 @@ -71521,9 +70491,125 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stdout: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1367: $PREPARSER ./calc input +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +input: + | (1 + # + 1) = 1111 +./calc.at:1360: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 
+Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -71534,50 +70620,137 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -error: 4444 != 1 -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 
+Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1362: cat stderr -stderr: -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -error: 4444 != 1 -./calc.at:1364: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: | 1 + 2 * 3 + !+ ++ ./calc.at:1362: $PREPARSER ./calc input -input: stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 Starting parse Entering state 0 Stack now 0 @@ -71658,13 +70831,9 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1364: $PREPARSER ./calc input +./calc.at:1360: cat stderr ./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: cat stderr stderr: -input: -./calc.at:1369: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS - | (!!) 
+ (1 2) = 1 Starting parse Entering state 0 Stack now 0 @@ -71745,2060 +70914,299 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1367: $PREPARSER ./calc input ./calc.at:1362: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1360: $PREPARSER ./calc input stderr: -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.1: 
1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 6 8 21 -Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Stack now 0 6 8 21 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 +Stack now 0 4 12 21 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) 
-Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 +Stack now 0 4 12 21 30 Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (5.2: ) - $2 = nterm exp 
(5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token 
is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: 
) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 -Stack now 0 6 8 20 4 +Stack now 0 8 23 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 6 8 20 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 -Stack now 0 6 8 20 4 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) Entering state 20 -Stack now 0 6 8 20 4 12 20 +Stack now 0 8 23 4 12 20 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.6: 2) - $2 = token '-' 
(10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 -Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp 
(12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 -Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 +Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) 
--> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token "end of input" (14.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1362: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + 
$1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 6 8 21 -Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Stack now 0 6 8 21 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 +Stack now 0 4 12 21 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line 
(3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Enterinput: -ing state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 +Stack now 0 4 12 21 30 Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + 
$1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: 
-1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is 
token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 -Stack now 0 6 8 20 4 +Stack now 0 8 23 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 6 8 20 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 -Stack now 0 6 8 20 4 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) Entering state 20 -Stack now 0 6 8 20 4 12 20 +Stack now 0 8 23 4 12 20 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 20 4 12 
-Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 -Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 
12 (line 112): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 -Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 +Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting 
token '=' (13.9: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (14.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1364: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1367: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1362: $PREPARSER ./calc input +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: -input: - | (- *) + (1 2) = 1 Starting parse Entering state 0 Stack now 0 @@ -73879,46 +71287,22 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1367: $PREPARSER ./calc input -input: -stderr: - | 1 2 -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 ./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token Next token is token number (1.1: 1) Shifting token number (1.1: 1) Entering state 1 @@ -73995,28 +71379,8 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) -Stack now 0 +./calc.at:1360: cat stderr +./calc.at:1369: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS ./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -74027,35 +71391,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1367: cat stderr +489. calc.at:1360: ok ./calc.at:1362: cat stderr input: -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (* *) + (*) + (*) -./calc.at:1367: $PREPARSER ./calc input -stderr: -input: -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ -./calc.at:1364: cat stderr ./calc.at:1362: $PREPARSER ./calc input stderr: -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -stderr: Starting parse Entering state 0 Stack now 0 @@ -74137,11 +71479,7 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -input: ./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1364: $PREPARSER ./calc input -stderr: stderr: Starting parse Entering state 0 @@ -74224,68 +71562,6 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1367: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !+ ++ -stderr: -./calc.at:1367: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -stderr: -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -74296,44 +71572,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1362: cat stderr -./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1364: cat stderr -input: -input: - | 1 + 2 * 3 + !- ++ input: -./calc.at:1367: $PREPARSER ./calc input | (#) + (#) = 2222 -stderr: - | error -stderr: ./calc.at:1362: $PREPARSER ./calc input -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -stderr: -./calc.at:1363: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 stderr: Starting parse Entering state 0 @@ -74456,29 +71698,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1363: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 Starting parse Entering state 0 Stack now 0 @@ -74600,23 +71821,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1367: cat stderr -./calc.at:1364: "$PERL" -pi -e 'use strict; +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -74626,6 +71831,227 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +496. calc.at:1370: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full ... 
+./calc.at:1370: mv calc.y.tmp calc.y + +./calc.at:1370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1362: cat stderr +input: +stderr: + | (1 + #) = 1111 +./calc.at:1362: $PREPARSER ./calc input +stdout: +stderr: +./calc.at:1363: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c calc.h + +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -74637,13 +72063,120 @@ }eg ' expout || exit 77 input: - | 1 + 2 * 3 + !* ++ -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1364: cat stderr -stderr: -memory exhausted -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1363: $PREPARSER ./calc input ./calc.at:1362: cat stderr +input: + | (# + 1) = 1111 +./calc.at:1362: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: Starting parse Entering state 0 @@ -75662,21 +73195,109 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -stderr: -input: -memory exhausted - | 1 = 2 = 3 -./calc.at:1364: $PREPARSER ./calc input ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: - | (1 + #) = 1111 -./calc.at:1362: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 @@ -76690,352 +74311,8 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 ./calc.at:1363: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 
-Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -input: -stderr: - | 1 2 -./calc.at:1363: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 
1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -stderr: - | (#) + (#) = 2222 -./calc.at:1367: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) -Stack now 0 -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) -Stack now 0 -stderr: +./calc.at:1370: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS ./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -77046,187 +74323,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -syntax error: invalid character: '#' input: - | - | +1 -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: + | 1 2 ./calc.at:1362: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1363: cat stderr -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: cat stderr -input: -stderr: - | (# + 1) = 1111 -./calc.at:1362: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -input: -stdout: -./calc.at:1368: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -input: -stderr: - | 1//2 ./calc.at:1363: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 
6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | (1 + #) = 1111 -./calc.at:1367: $PREPARSER ./calc input stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -77241,143 +74343,17 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token "number" (1.3: 2) Stack now 0 -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1368: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c - -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1362: $PREPARSER ./calc input ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -syntax error: invalid character: '#' -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: Starting parse Entering state 0 Stack now 0 @@ -77392,111 +74368,12 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1364: $PREPARSER ./calc /dev/null -input: -stderr: -syntax error: invalid character: '#' -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1368: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: cat stderr -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -./calc.at:1362: cat stderr -./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) +Cleanup: discarding lookahead token "number" (1.3: 2) Stack now 0 -./calc.at:1363: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1367: $PREPARSER ./calc input -input: -input: - | (1 + # + 1) = 1111 -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1362: $PREPARSER ./calc input - | 1 2 -./calc.at:1368: $PREPARSER ./calc input -input: - | error -stderr: -stderr: -syntax error: invalid character: '#' -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1363: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -77611,27 +74488,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: cat stderr -stderr: -stderr: -stderr: ./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1364: $PREPARSER ./calc input stderr: -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -77746,334 +74604,50 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1363: cat stderr +input: + | 1//2 +./calc.at:1363: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -stderr: -./calc.at:1367: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 12 22 1 +Stack now 0 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -78084,358 +74658,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1368: cat stderr -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1362: cat stderr -stderr: - | (1 + # + 1) = 1111 -input: -./calc.at:1367: $PREPARSER ./calc input - | 1//2 -./calc.at:1368: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 12 22 1 +Stack now 0 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 
2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -./calc.at:1363: cat stderr -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -syntax error: invalid character: '#' -input: -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -stderr: -./calc.at:1362: $PREPARSER ./calc input -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -input: -./calc.at:1364: "$PERL" -pi -e 'use strict; +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1362: cat stderr +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -78445,9 +74696,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' - | 1 = 2 = 3 -./calc.at:1363: $PREPARSER ./calc input +input: +./calc.at:1363: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1362: $PREPARSER ./calc input +input: stderr: Starting parse Entering state 0 @@ -78590,52 +74843,21 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1364: cat stderr + | error +./calc.at:1363: $PREPARSER ./calc input stderr: -./calc.at:1368: cat stderr ./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) Stack now 0 +stderr: ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1367: cat stderr Starting parse Entering state 0 Stack now 0 @@ -78777,10 +74999,40 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1362: cat stderr +./calc.at:1363: cat stderr +490. calc.at:1362: ok input: -input: - | (!!) + (1 2) = 1 - | error + | 1 = 2 = 3 +./calc.at:1363: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -78821,166 +75073,48 @@ Stack now 0 Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -./calc.at:1368: $PREPARSER ./calc input -./calc.at:1364: $PREPARSER ./calc input -stderr: -input: -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + stderr: - | (1 + 1) / (1 - 1) -./calc.at:1367: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 ./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -78991,175 +75125,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: cat stderr -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stderr: -error: null divisor -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1363: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering 
state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -490. calc.at:1362: ok -./calc.at:1368: cat stderr -error: null divisor -input: -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 input: | | +1 ./calc.at:1363: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1368: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -79186,17 +75156,8 @@ Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -stderr: -./calc.at:1364: cat stderr - -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: cat stderr ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -input: -stderr: Starting parse Entering state 0 Stack now 0 @@ -79222,159 +75183,9 @@ Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 - | (- *) + (1 2) = 1 -./calc.at:1364: $PREPARSER ./calc input -493. calc.at:1367: ok -./calc.at:1368: cat stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +stdout: +./types.at:139: $PREPARSER ./test ./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -79385,168 +75196,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | - | +1 -./calc.at:1368: $PREPARSER ./calc input stderr: -./calc.at:1363: cat stderr +497. calc.at:1371: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full parse.lac=full ... +./calc.at:1371: mv calc.y.tmp calc.y -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering 
state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: cat stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +======== Testing with C++ standard flags: '' ./calc.at:1363: $PREPARSER ./calc /dev/null stderr: -stderr: Starting parse Entering state 0 Stack now 0 @@ -79555,21 +75214,9 @@ 1.1: syntax error, unexpected end of input Cleanup: discarding lookahead token "end of input" (1.1: ) Stack now 0 -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1364: cat stderr -./calc.at:1368: cat stderr Starting parse Entering state 0 Stack now 0 @@ -79578,13 +75225,20 @@ 1.1: syntax error, unexpected end of input Cleanup: discarding lookahead token "end of input" (1.1: ) Stack now 0 -./calc.at:1368: $PREPARSER ./calc /dev/null -input: stderr: - | (* *) + (*) + (*) -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1367: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1367: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + ./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -79595,327 +75249,29 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1363: cat stderr -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1367: $PREPARSER ./calc input input: stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -496. calc.at:1370: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full ... -./calc.at:1370: mv calc.y.tmp calc.y - -./calc.at:1368: cat stderr | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1363: $PREPARSER ./calc input -497. calc.at:1371: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full parse.lac=full ... 
-./calc.at:1371: mv calc.y.tmp calc.y - stderr: -./calc.at:1370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -input: +stderr: Starting parse Entering state 0 Stack now 0 @@ -80225,17 +75581,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Reading a./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - token +Reading a token Now at end of input. Shifting token "end of input" (2.1: ) Entering state 17 @@ -80243,11 +75589,8 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: $PREPARSER ./calc input -stderr: stderr: Starting parse Entering state 0 @@ -80566,13 +75909,12 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: cat stderr +input: + | 1 2 +./calc.at:1367: $PREPARSER ./calc input +stderr: +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -80583,184 +75925,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1364: $PREPARSER ./calc input stderr: +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) ./calc.at:1363: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = 
nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 117): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 117): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) input: -./calc.at:1364: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 | (!!) + (1 2) = 1 +stderr: ./calc.at:1363: $PREPARSER ./calc input -input: +./calc.at:1367: cat stderr +stdout: +./calc.at:1368: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' stderr: Starting parse Entering state 0 @@ -80902,91 +76076,25 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | 1 + 2 * 3 + !- ++ -./calc.at:1364: $PREPARSER ./calc input +input: +./calc.at:1368: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + + | 1//2 ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: $PREPARSER ./calc input stderr: stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 118): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -81127,89 +76235,28 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: cat stderr +input: stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 118): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1368: $PREPARSER ./calc input +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +stderr: +stdout: +stderr: +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check ./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -81220,30 +76267,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: +./calc.at:1371: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1367: cat stderr ./calc.at:1363: cat stderr - | (!!) + (1 2) = 1 -./calc.at:1368: $PREPARSER ./calc input -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 input: -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 +input: | (- *) + (1 2) = 1 -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 2 +./calc.at:1368: $PREPARSER ./calc input +stderr: ./calc.at:1363: $PREPARSER ./calc input -./calc.at:1364: cat stderr +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: +input: + | error +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) Starting parse Entering state 0 Stack now 0 @@ -81392,96 +76435,12 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1364: $PREPARSER ./calc input +./calc.at:1367: $PREPARSER ./calc input ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1368: cat stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 
3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 119): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1368: cat stderr Starting parse Entering state 0 Stack now 0 @@ -81630,90 +76589,9 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 119): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) ./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -81724,34 +76602,52 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (- *) + (1 2) = 1 -./calc.at:1368: $PREPARSER ./calc input stderr: -./calc.at:1370: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stdout: +input: +./calc.at:1364: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' ./calc.at:1363: cat stderr -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1371: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS + | 1//2 +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1364: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c calc.h + +./calc.at:1367: cat stderr input: +stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) | (* *) + (*) + (*) +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1363: $PREPARSER ./calc input +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +input: +./calc.at:1364: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1367: $PREPARSER ./calc input +stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1364: cat stderr stderr: Starting parse Entering state 0 @@ -81904,10 +76800,13 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: ./calc.at:1368: cat stderr +stderr: +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +stderr: Starting parse Entering state 0 Stack now 0 @@ -82059,377 +76958,15 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (#) + (#) = 2222 -./calc.at:1364: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.8: ) -Error: discarding token "invalid token" (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp 
(1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 input: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) + | error +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./calc.at:1368: $PREPARSER ./calc input -./calc.at:1363: cat stderr stderr: -stderr: -stdout: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) ./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1369: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token 
error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.8: ) -Error: discarding token "invalid token" (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1369: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c - -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1368: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 117): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: -./calc.at:1364: "$PERL" -pi -e 'use strict; +./calc.at:1367: cat stderr +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -82439,29 +76976,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1369: $PREPARSER ./calc input -stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1368: $PREPARSER ./calc input -./calc.at:1364: cat stderr stderr: stderr: -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 @@ -82506,7 +77023,7 @@ Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '+' (1.11: ) +Next token is token '=' (1.11: ) Reducing stack by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) @@ -82514,7 +77031,7 @@ -> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) @@ -82522,122 +77039,36 @@ -> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 117): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1363: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -stderr: -./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | (1 + #) = 1111 -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1369: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1363: $PREPARSER ./calc input -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.14-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -82647,238 +77078,2257 @@ Entering state 6 Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | 1 + 2 * 3 + !- ++ -input: -stderr: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: $PREPARSER ./calc input - | 1 2 -./calc.at:1369: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) Entering state 1 -Stack now 0 1 +Stack now 0 6 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 6 8 21 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 30 -Stack now 0 8 21 30 +Stack now 0 6 8 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) Entering state 22 -Stack now 0 8 21 30 22 +Stack now 0 6 8 21 30 22 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 6 8 21 30 22 2 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 Reading a token -Next token is token '+' (1.11: ) +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -stderr: -Starting parse -Entering state 0 -Stack now 0 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) Entering state 1 -Stack now 0 4 1 +Stack now 0 6 8 19 2 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 
8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) Entering state 1 -Stack now 0 1 +Stack now 0 6 2 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 6 2 10 24 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 
+Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 
+Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Stack now 
0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 20 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 20 4 12 +Reading a token +Next 
token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token "number" (12.5: 3) +Shifting 
token "number" (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing 
stack by rule 12 (line 112): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (14.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1363: cat stderr +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | + | +1 +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1368: cat stderr +stderr: +stderr: +stderr: +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 
+Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Stack now 0 6 8 21 +Reading a token +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp 
(2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Enterinput: +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +ing state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering 
state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering 
state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a 
token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 20 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 20 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = 
token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting 
token "number" (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' 
(13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (14.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1364: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stdout: +./calc.at:1369: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' + | 1 + 2 * 3 + !+ ++ +./calc.at:1363: $PREPARSER ./calc input +input: + | 1 = 2 = 3 +stderr: +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +stderr: +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1369: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 117): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 2 +stderr: +./calc.at:1364: $PREPARSER ./calc input +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +stderr: +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 117): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1363: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.3: 2) +Stack now 0 +./calc.at:1367: cat stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1369: $PREPARSER ./calc input +stderr: +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1368: cat stderr +./calc.at:1367: $PREPARSER ./calc /dev/null +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +stderr: +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.3: 2) +Stack now 0 + | 1 + 2 * 3 + !- ++ +./calc.at:1363: $PREPARSER ./calc input +stderr: +input: + | + | +1 +./calc.at:1368: $PREPARSER ./calc input +stderr: +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +./calc.at:1369: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 Reading a token @@ -82928,9 +79378,9 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1368: cat stderr +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1364: "$PERL" 
-pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -82941,6 +79391,101 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +stderr: +input: + | 1 2 +./calc.at:1369: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 118): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1364: cat stderr +./calc.at:1367: cat stderr +stdout: +./types.at:139: $PREPARSER ./test +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1368: cat stderr +stderr: ./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -82951,125 +79496,73 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 + 2 * 3 + !* ++ -./calc.at:1368: $PREPARSER ./calc input -./calc.at:1364: cat stderr -./calc.at:1363: cat stderr -./calc.at:1369: cat stderr +./calc.at:1368: $PREPARSER ./calc /dev/null +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1364: $PREPARSER ./calc input stderr: -1.14: memory exhausted +stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1363: cat stderr ./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -1.14: memory exhausted -input: - | (# + 1) = 1111 -input: -./calc.at:1364: $PREPARSER ./calc input - | 1 + 2 * 3 + !* ++ -./calc.at:1363: $PREPARSER ./calc input - | 1//2 -./calc.at:1369: $PREPARSER ./calc input +======== Testing with C++ standard flags: '' stderr: +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +error: 4444 != 1 +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.1: 1) +Shifting token 
"number" (1.1: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +input: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) stderr: -./calc.at:1368: cat stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +error: 4444 != 1 +stderr: +stderr: +./calc.at:1369: cat stderr Starting parse Entering state 0 Stack now 0 @@ -83151,112 +79644,51 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (#) + (#) = 2222 -./calc.at:1368: $PREPARSER ./calc input -stderr: -stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is 
token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1368: cat stderr +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1//2 +./calc.at:1369: $PREPARSER ./calc input +input: stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -83338,13 +79770,23 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1367: cat stderr +stderr: +./calc.at:1368: $PREPARSER ./calc input 1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +stderr: +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 ./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1364: cat stderr + | (!!) + (1 2) = 1 +./calc.at:1367: $PREPARSER ./calc input stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' ./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -83355,41 +79797,53 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1369: cat stderr -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1368: cat stderr -./calc.at:1363: cat stderr +stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +stderr: +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 input: -./calc.at:1364: cat stderr | error -input: -./calc.at:1369: $PREPARSER ./calc input - | (1 + #) = 1111 -./calc.at:1368: $PREPARSER ./calc input -input: stderr: +./calc.at:1363: cat stderr +./calc.at:1364: $PREPARSER ./calc input +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +./calc.at:1369: cat stderr stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -1.6: syntax error: invalid character: '#' -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 +input: +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (#) + (#) = 2222 ./calc.at:1363: $PREPARSER ./calc input input: +./calc.at:1368: cat stderr + | error +./calc.at:1369: $PREPARSER ./calc input stderr: - | (1 + # + 1) = 1111 -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge 
mismatch for summaries/d' stderr -./calc.at:1364: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) stderr: Starting parse Entering state 0 @@ -83512,126 +79966,30 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stderr: +input: ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input 
(1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1368: cat stderr + | (!!) + (1 2) = 1 +./calc.at:1368: $PREPARSER ./calc input stderr: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: cat stderr +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +input: Starting parse Entering state 0 Stack now 0 @@ -83753,168 +80111,138 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1364: cat stderr +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 + | (- *) + (1 2) = 1 +./calc.at:1367: $PREPARSER ./calc input +stderr: input: +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 + | 1 = 2 = 3 ./calc.at:1369: cat stderr - | (# + 1) = 1111 +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1368: cat stderr +stderr: stderr: -./calc.at:1368: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 
-Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 input: -1.2: syntax error: invalid character: '#' -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 | 1 = 2 = 3 +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1369: $PREPARSER ./calc input +./calc.at:1363: cat stderr +input: stderr: stderr: -1.2: syntax error: invalid character: '#' 1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1363: cat stderr -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 + | (- *) + (1 2) = 1 +./calc.at:1368: $PREPARSER ./calc input input: -./calc.at:1368: cat stderr +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: | (1 + #) = 1111 -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1364: cat stderr ./calc.at:1363: $PREPARSER ./calc input -input: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: cat stderr +stderr: stderr: - | (1 + # + 1) = 1111 -./calc.at:1368: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -84013,18 +80341,26 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -input: -./calc.at:1369: cat stderr -1.6: syntax error: invalid character: '#' +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: $PREPARSER ./calc input stderr: -stderr: -1.6: syntax error: invalid character: '#' input: +stderr: +./calc.at:1364: cat stderr Starting parse Entering state 0 Stack now 0 @@ -84123,135 +80459,51 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stdout: +./calc.at:1369: cat stderr + | (* *) + (*) + (*) +./calc.at:1367: $PREPARSER ./calc input +./types.at:139: ./check +./calc.at:1368: cat stderr stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +input: +input: +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 | | +1 +./calc.at:1364: $PREPARSER ./calc input ./calc.at:1369: $PREPARSER ./calc input +input: +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +stderr: + | (* *) + (*) + (*) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing 
stack by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -84261,155 +80513,37 @@ Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1368: $PREPARSER ./calc input +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) stderr: ./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) ./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: cat stderr -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1363: cat stderr +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Stack now 0 8 
23 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -84419,21 +80553,21 @@ Entering state 6 Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +stderr: 2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) input: -./calc.at:1363: cat stderr - | (1 + 1) / (1 - 1) -./calc.at:1368: $PREPARSER ./calc input -stderr: -1.11-17: error: null divisor -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) + | (# + 1) = 1111 +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1369: cat stderr ./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -84444,14 +80578,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1369: cat stderr -input: -stderr: - | (# + 1) = 1111 -./calc.at:1363: $PREPARSER ./calc input -1.11-17: error: null divisor stderr: -./calc.at:1364: cat stderr +./calc.at:1367: cat stderr Starting parse Entering state 0 Stack now 0 @@ -84547,14 +80675,13 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1368: cat stderr +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1369: $PREPARSER ./calc /dev/null stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -492. calc.at:1364: ./calc.at:1368: cat stderr - ok +./calc.at:1364: cat stderr stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -84650,10 +80777,16 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + | 1 + 2 * 3 + !+ ++ +./calc.at:1368: $PREPARSER ./calc input stderr: 1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -494. calc.at:1368: ok -./calc.at:1369: cat stderr +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1367: $PREPARSER ./calc input +stderr: +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -84664,23 +80797,50 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1369: $PREPARSER ./calc input +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 ./calc.at:1363: cat stderr - stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 +./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: +./calc.at:1369: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (1 + # + 1) = 1111 +stderr: ./calc.at:1363: $PREPARSER ./calc input stderr: +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1369: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 + | 1 + 2 * 3 + !- ++ +stderr: +./calc.at:1367: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -84796,13 +80956,43 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stdout: +stderr: +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' 1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) 1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) 1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) 1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) 1.1-46: error: 4444 != 1 -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + +stderr: +stderr: +./calc.at:1368: cat stderr Starting parse Entering state 0 Stack now 0 @@ -84917,7 +81107,20 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1369: cat stderr +./calc.at:1364: cat stderr +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +input: +input: + | 1 + 2 * 3 + !* ++ +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +input: +./calc.at:1368: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -84928,19 +81131,683 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1370: $PREPARSER ./calc input +stderr: +./calc.at:1364: $PREPARSER ./calc input +1.14: memory exhausted +stderr: +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1363: cat stderr - | (!!) + (1 2) = 1 -./calc.at:1369: $PREPARSER ./calc input stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 +./calc.at:1369: cat stderr +./calc.at:1367: cat stderr +1.14: memory exhausted +stderr: +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +stderr: input: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (1 + 1) / (1 - 1) +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (!!) + (1 2) = 1 ./calc.at:1363: $PREPARSER ./calc input +./calc.at:1369: $PREPARSER ./calc input +./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1368: cat stderr stderr: -499. calc.at:1375: testing Calculator %start input exp NUM api.value.type=union %locations parse.error=detailed ... +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1367: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -85083,16 +81950,35 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1375: mv calc.y.tmp calc.y - +input: +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + | (#) + (#) = 2222 +./calc.at:1368: $PREPARSER ./calc input +memory exhausted +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: 1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) 1.1-16: error: 2222 != 1 -498. calc.at:1374: testing Calculator %start input exp NUM api.value.type=union ... + | 1 2 +./calc.at:1370: $PREPARSER ./calc input stderr: -./calc.at:1374: mv calc.y.tmp calc.y - +stderr: +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -85234,7 +82120,15 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +memory exhausted +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1364: cat stderr +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1369: cat stderr ./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -85246,156 +82140,2239 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1363: cat stderr +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +input: +./calc.at:1368: cat stderr input: | (- *) + (1 2) = 1 ./calc.at:1369: $PREPARSER ./calc input -491. calc.at:1363: stderr: - ok +./calc.at:1363: cat stderr + | (!!) + (1 2) = 1 +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1370: cat stderr +stderr: +./calc.at:1367: cat stderr 1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) 1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) 1.1-17: error: 2222 != 1 - +stderr: ./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + #) = 1111 +./calc.at:1368: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +491. 
calc.at:1363: input: + ok +input: stderr: + | 1//2 +./calc.at:1370: $PREPARSER ./calc input 1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) 1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) 1.1-17: error: 2222 != 1 +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 +./calc.at:1367: $PREPARSER ./calc input +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = 
nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: ./calc.at:1369: cat stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +./calc.at:1368: cat stderr +input: +./calc.at:1370: cat stderr input: +./calc.at:1364: cat stderr | (* *) + (*) + (*) ./calc.at:1369: $PREPARSER ./calc input stderr: +./calc.at:1367: cat stderr + | (# + 1) = 1111 +./calc.at:1368: $PREPARSER ./calc input +input: 1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) 1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) 1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +input: + | error +./calc.at:1370: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1364: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) + | (1 + #) = 1111 +stderr: +./calc.at:1367: $PREPARSER ./calc input +stderr: +stderr: +1.2: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) 1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) 1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) 1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -500. calc.at:1387: testing Calculator %glr-parser ... -./calc.at:1387: mv calc.y.tmp calc.y - +stderr: +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +stderr: +./calc.at:1368: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) 
+Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1369: cat stderr -./calc.at:1387: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1375: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +syntax error: invalid character: '#' +./calc.at:1370: cat stderr +input: +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: + | (1 + # + 1) = 1111 +./calc.at:1368: $PREPARSER ./calc input | 1 + 2 * 3 + !+ ++ ./calc.at:1369: $PREPARSER ./calc input +input: + | 1 = 2 = 3 +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1367: cat stderr +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +./calc.at:1364: cat stderr +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) ./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +input: +stderr: + | (# + 1) = 1111 +input: +./calc.at:1367: $PREPARSER ./calc input +stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) ./calc.at:1369: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: + | (* *) + (*) + (*) +./calc.at:1364: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1368: cat stderr +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1370: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) input: +syntax error: invalid character: '#' +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 + !- ++ ./calc.at:1369: $PREPARSER ./calc input +input: +stderr: +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1368: $PREPARSER ./calc input + | + | +1 +./calc.at:1370: $PREPARSER ./calc input +stderr: +stderr: +1.11-17: error: null divisor +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: cat stderr +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: ./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +1.11-17: error: null divisor +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +498. calc.at:1374: testing Calculator %start input exp NUM api.value.type=union ... +stderr: +./calc.at:1374: mv calc.y.tmp calc.y + +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1364: cat stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1367: $PREPARSER ./calc input +stderr: +./calc.at:1368: cat stderr stderr: +./calc.at:1370: cat stderr +syntax error: invalid character: '#' +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1370: $PREPARSER ./calc /dev/null + | 1 + 2 * 3 + !+ ++ +./calc.at:1364: $PREPARSER ./calc input ./calc.at:1369: cat stderr +stderr: +494. calc.at:1368: syntax error: invalid character: '#' +stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) + ok +stderr: +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 117): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +stderr: +stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) | 1 + 2 * 3 + !* ++ ./calc.at:1369: $PREPARSER ./calc input stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 117): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1364: $EGREP -c -v 'Return for a new token:|LAC:' stderr 1.14: memory exhausted + +./calc.at:1367: cat stderr ./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1370: cat stderr +input: 1.14: memory exhausted +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1364: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +stderr: +./calc.at:1367: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 118): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: +stderr: +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1369: cat stderr +error: null divisor + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: $PREPARSER ./calc input +stderr: +stderr: +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +error: null divisor input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) | (#) + (#) = 2222 ./calc.at:1369: $PREPARSER ./calc input +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 ./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: cat stderr +./calc.at:1367: cat stderr stderr: 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' +./calc.at:1370: cat stderr +493. calc.at:1367: input: + ok +input: + | 1 + 2 * 3 + !* ++ ./calc.at:1369: cat stderr -./calc.at:1387: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1364: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +./calc.at:1370: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 119): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 input: +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (1 + #) = 1111 ./calc.at:1369: $PREPARSER ./calc input + +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: 1.6: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 119): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 ./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +499. calc.at:1375: testing Calculator %start input exp NUM api.value.type=union %locations parse.error=detailed ... +./calc.at:1375: mv calc.y.tmp calc.y + +./calc.at:1375: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: 1.6: syntax error: invalid character: '#' +./calc.at:1370: cat stderr ./calc.at:1369: cat stderr +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1374: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +input: input: | (# + 1) = 1111 ./calc.at:1369: $PREPARSER ./calc input stderr: 1.2: syntax error: invalid character: '#' + | (- *) + (1 2) = 1 +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1364: cat stderr ./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +1.2: syntax error: invalid character: '#' +input: + | (#) + (#) = 2222 +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token 1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.8: ) +Error: discarding token "invalid token" (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' 
(1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +500. calc.at:1387: testing Calculator %glr-parser ... +./calc.at:1387: mv calc.y.tmp calc.y + +stderr: ./calc.at:1369: cat stderr +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.8: ) +Error: discarding token "invalid token" (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 
(line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) input: | (1 + # + 1) = 1111 ./calc.at:1369: $PREPARSER ./calc input +./calc.at:1370: cat stderr stderr: 1.6: syntax error: invalid character: '#' +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: ./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +./calc.at:1370: $PREPARSER ./calc input stderr: +./calc.at:1364: cat stderr 1.6: syntax error: invalid character: '#' +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +input: +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./calc.at:1364: $PREPARSER ./calc input ./calc.at:1369: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp 
(1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: input: +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) | (1 + 1) / (1 - 1) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line 
(1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1369: $PREPARSER ./calc input stderr: 1.11-17: error: null divisor +./calc.at:1375: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS ./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: cat stderr stderr: 1.11-17: error: null divisor -./calc.at:1369: cat stderr -495. calc.at:1369: ok - -stderr: -stdout: -./calc.at:1370: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1370: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -501. calc.at:1389: testing Calculator %glr-parser %header ... -./calc.at:1389: mv calc.y.tmp calc.y - +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 + | 1 + 2 * 3 + !+ ++ ./calc.at:1370: $PREPARSER ./calc input +./calc.at:1364: cat stderr stderr: -./calc.at:1389: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: stdout: ./calc.at:1371: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -input: - | 1 2 ./calc.at:1371: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -85407,11 +84384,111 @@ || /\t/ )' calc.c -./calc.at:1370: $PREPARSER ./calc input -stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1369: cat stderr ./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: + | (# + 1) = 1111 +./calc.at:1364: $PREPARSER ./calc input +495. 
calc.at:1369: stderr: +./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr + ok +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -85425,80 +84502,872 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1371: $PREPARSER ./calc input stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + | 1 + 2 * 3 + !- ++ ./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1370: cat stderr +./calc.at:1370: $PREPARSER ./calc input stderr: -input: +stderr: +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + ./calc.at:1371: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | 1//2 -./calc.at:1370: $PREPARSER ./calc input +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1364: cat stderr input: | 1 2 ./calc.at:1371: $PREPARSER ./calc input -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1387: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1370: cat stderr stderr: +input: + | (1 + # + 1) = 1111 1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +input: +./calc.at:1364: $PREPARSER ./calc input + | 1 + 2 * 3 + !* ++ +./calc.at:1370: $PREPARSER ./calc input ./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1370: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) 
+Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.14: memory exhausted +stderr: 1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 
+Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.14: memory exhausted +./calc.at:1371: cat stderr input: - | error +./calc.at:1370: cat stderr +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1//2 +./calc.at:1371: $PREPARSER ./calc input +stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) + | (#) + (#) = 2222 +./calc.at:1364: cat stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1370: $PREPARSER ./calc input +input: + | (1 + 1) / (1 - 1) +stderr: +./calc.at:1364: $PREPARSER ./calc input +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +501. calc.at:1389: testing Calculator %glr-parser %header ... +./calc.at:1389: mv calc.y.tmp calc.y + +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 
+Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1371: cat stderr stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1389: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token 
"number" (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + | error +./calc.at:1371: $PREPARSER ./calc input +stderr: +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1370: cat stderr 1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) input: - | 1//2 +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./calc.at:1370: $PREPARSER ./calc input +stderr: +1.6: syntax error: invalid character: '#' +stderr: +./calc.at:1364: cat stderr +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +1.6: syntax error: invalid character: '#' +492. 
calc.at:1364: ./calc.at:1371: cat stderr + ok ./calc.at:1370: cat stderr -./calc.at:1371: $PREPARSER ./calc input -./calc.at:1389: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +stdout: +input: +./types.at:139: $PREPARSER ./test input: | 1 = 2 = 3 +stderr: +./calc.at:1371: $PREPARSER ./calc input + | (# + 1) = 1111 ./calc.at:1370: $PREPARSER ./calc input -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +1.2: syntax error: invalid character: '#' stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +======== Testing with C++ standard flags: '' +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1371: cat stderr +1.2: syntax error: invalid character: '#' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) ./calc.at:1370: cat stderr -input: +./calc.at:1371: cat stderr input: | | +1 +./calc.at:1371: $PREPARSER ./calc input +input: + | (1 + # + 1) = 1111 ./calc.at:1370: $PREPARSER ./calc input stderr: - | error -./calc.at:1371: $PREPARSER ./calc input 2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +1.6: syntax error: invalid character: '#' +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./calc.at:1374: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +stderr: +./calc.at:1389: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +1.6: syntax error: invalid character: '#' +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1370: cat stderr +./calc.at:1371: cat stderr +./calc.at:1371: $PREPARSER ./calc /dev/null +502. calc.at:1390: testing Calculator %glr-parser %locations ... +input: +./calc.at:1390: mv calc.y.tmp calc.y + +stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + | (1 + 1) / (1 - 1) +./calc.at:1370: $PREPARSER ./calc input +stderr: +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor ./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +1.11-17: error: null divisor +./calc.at:1371: cat stderr +./calc.at:1370: cat stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1371: $PREPARSER ./calc input +496. 
calc.at:1370: ok +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 + +./calc.at:1371: cat stderr +stderr: +stdout: +input: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + | (!!) + (1 2) = 1 +./calc.at:1371: $PREPARSER ./calc input +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 ./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1371: cat stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1371: $PREPARSER ./calc input +stderr: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +503. calc.at:1391: testing Calculator %glr-parser %locations api.location.type={Span} ... 
+./calc.at:1391: mv calc.y.tmp calc.y + +./calc.at:1371: cat stderr +./calc.at:1391: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1390: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +input: + | (* *) + (*) + (*) +./calc.at:1371: $PREPARSER ./calc input +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1371: cat stderr +stderr: +stdout: +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1371: $PREPARSER ./calc input +./calc.at:1374: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +stderr: ./calc.at:1374: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -85510,10 +85379,10 @@ || /\t/ )' calc.c -./calc.at:1370: cat stderr -./calc.at:1371: cat stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1370: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1371: $EGREP -c -v 'Return for a new token:|LAC:' stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -85529,43 +85398,29 @@ | (2^2)^3 = 64 ./calc.at:1374: $PREPARSER ./calc input stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -input: -stderr: ./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 +input: + | 1 + 2 * 3 + !- ++ ./calc.at:1371: $PREPARSER ./calc input -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) ./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1370: cat stderr +stderr: input: | 1 2 ./calc.at:1374: $PREPARSER ./calc input stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -stderr: syntax error ./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: syntax error - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1370: $PREPARSER ./calc input -stderr: ./calc.at:1371: cat stderr -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: + | 1 + 2 * 3 + !* ++ +./calc.at:1371: $PREPARSER ./calc input +stderr: +1.14: memory exhausted ./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -85576,45 +85431,65 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 
+./calc.at:1391: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 - | - | +1 +1.14: memory exhausted +./calc.at:1374: cat stderr +input: +./calc.at:1371: cat stderr + | 1//2 +./calc.at:1374: $PREPARSER ./calc input +stderr: +input: + | (#) + (#) = 2222 ./calc.at:1371: $PREPARSER ./calc input +syntax error +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1370: cat stderr +syntax error +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' ./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1371: cat stderr ./calc.at:1374: cat stderr input: input: + | error + | (1 + #) = 1111 +./calc.at:1371: $PREPARSER ./calc input stderr: - | (!!) + (1 2) = 1 -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1370: $PREPARSER ./calc input -stderr: - | 1//2 ./calc.at:1374: $PREPARSER ./calc input stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 syntax error +1.6: syntax error: invalid character: '#' ./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: cat stderr stderr: +stderr: +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error +stdout: +./types.at:139: $PREPARSER ./test stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1371: $PREPARSER ./calc /dev/null +1.6: syntax error: invalid character: '#' stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1370: cat stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: cat stderr +======== Testing with C++ standard flags: '' ./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -85625,41 +85500,68 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS input: - | (- *) + (1 2) = 1 -./calc.at:1370: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1371: $PREPARSER ./calc input stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' 
stderr stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +1.2: syntax error: invalid character: '#' ./calc.at:1374: cat stderr -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 +./calc.at:1371: cat stderr input: - | error + | 1 = 2 = 3 ./calc.at:1374: $PREPARSER ./calc input -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1371: cat stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1371: $PREPARSER ./calc input +stderr: syntax error ./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' stderr: +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1371: cat stderr +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (1 + 1) / (1 - 1) ./calc.at:1371: $PREPARSER ./calc input stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 +1.11-17: error: null divisor ./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: cat stderr +stderr: +1.11-17: error: null divisor +input: + | + | +1 +./calc.at:1374: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: cat stderr +stderr: +syntax error +stderr: +stdout: +497. 
calc.at:1371: ok +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -85670,54 +85572,47 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1370: cat stderr -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -input: - | (* *) + (*) + (*) -./calc.at:1370: $PREPARSER ./calc input + ./calc.at:1374: cat stderr stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1371: cat stderr -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: $PREPARSER ./test +./calc.at:1374: $PREPARSER ./calc /dev/null stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -input: - | 1 = 2 = 3 -input: -./calc.at:1374: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -./calc.at:1371: $PREPARSER ./calc input stderr: -./calc.at:1370: cat stderr syntax error ./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: syntax error +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1374: cat stderr input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1370: $PREPARSER ./calc input +./calc.at:1374: $PREPARSER ./calc input stdout: -stderr: ./calc.at:1375: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1375: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -85729,6 +85624,16 @@ || /\t/ )' calc.c +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +504. calc.at:1392: testing Calculator %glr-parser %name-prefix "calc" ... 
+./calc.at:1392: mv calc.y.tmp calc.y + +./calc.at:1392: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +input: ./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -85739,10 +85644,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1371: cat stderr -./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -85757,53 +85658,30 @@ | 2^2^3 = 256 | (2^2)^3 = 64 ./calc.at:1375: $PREPARSER ./calc input -input: -input: -./calc.at:1374: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1370: $PREPARSER ./calc input -stderr: stderr: ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1371: $PREPARSER ./calc input -input: stderr: -stderr: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | - | +1 -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 +./calc.at:1374: cat stderr +input: + | (!!) + (1 2) = 1 ./calc.at:1374: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: stderr: -stderr: | 1 2 ./calc.at:1375: $PREPARSER ./calc input -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 syntax error +error: 2222 != 1 +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1370: cat stderr 1.3: syntax error, unexpected number ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -1.3: syntax error, unexpected number -./calc.at:1371: cat stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1370: $PREPARSER ./calc input stderr: -./calc.at:1374: "$PERL" -pi -e 'use strict; +syntax error +error: 2222 != 1 +1.3: syntax error, unexpected number +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85813,8 +85691,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.14: memory exhausted -./calc.at:1375: "$PERL" -pi -e 'use strict; +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85824,46 +85701,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -./calc.at:1371: $PREPARSER ./calc input ./calc.at:1375: cat stderr stderr: -1.14: memory exhausted -stderr: -./calc.at:1374: cat stderr -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y input: +./calc.at:1374: cat stderr | 1//2 -./calc.at:1371: 
sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1375: $PREPARSER ./calc input -./calc.at:1370: cat stderr -./calc.at:1374: $PREPARSER ./calc /dev/null -stderr: stderr: 1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -stderr: input: + | (- *) + (1 2) = 1 +./calc.at:1374: $PREPARSER ./calc input stderr: - | (#) + (#) = 2222 -./calc.at:1370: $PREPARSER ./calc input 1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +stderr: +syntax error syntax error +error: 2222 != 1 +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: cat stderr +syntax error +syntax error +error: 2222 != 1 ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -85874,10 +85737,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: ./calc.at:1375: cat stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +input: + | error +./calc.at:1375: $PREPARSER ./calc input ./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -85888,32 +85751,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 + 2 * 3 + !+ ++ -input: -./calc.at:1371: $PREPARSER ./calc input -./calc.at:1370: cat stderr - | error -./calc.at:1375: $PREPARSER ./calc input -stderr: stderr: 1.1: syntax error, unexpected invalid token ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1370: $PREPARSER ./calc input -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1392: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1374: cat stderr 1.1: syntax error, unexpected invalid token -stderr: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -1.6: syntax error: invalid character: '#' -./calc.at:1374: $PREPARSER ./calc input -stderr: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -85924,33 +85768,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: + | (* *) + (*) + (*) +./calc.at:1374: $PREPARSER ./calc input +stderr: +./calc.at:1375: cat stderr syntax error syntax error syntax error -syntax error -error: 4444 != 1 ./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1371: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.6: syntax error: invalid character: '#' stderr: -./calc.at:1375: cat stderr -syntax error +stdout: syntax error syntax error syntax error -error: 4444 != 1 -./calc.at:1370: cat stderr -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1371: $PREPARSER ./calc input -input: -stderr: +./types.at:139: $PREPARSER 
./test input: | 1 = 2 = 3 ./calc.at:1375: $PREPARSER ./calc input - | (# + 1) = 1111 -./calc.at:1370: $PREPARSER ./calc input +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +1.7: syntax error, unexpected '=' +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -85961,20 +85803,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.7: syntax error, unexpected '=' -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' stderr: -stderr: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.7: syntax error, unexpected '=' -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1374: cat stderr -./calc.at:1371: cat stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -85985,44 +85817,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1370: cat stderr +./calc.at:1374: cat stderr ./calc.at:1375: cat stderr input: - | (!!) + (1 2) = 1 -input: - | 1 + 2 * 3 + !* ++ + | 1 + 2 * 3 + !+ ++ ./calc.at:1374: $PREPARSER ./calc input -./calc.at:1371: $PREPARSER ./calc input stderr: input: -input: | | +1 -syntax error -error: 2222 != 1 +./calc.at:1375: $PREPARSER ./calc input ./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1375: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -./calc.at:1370: $PREPARSER ./calc input -1.14: memory exhausted stderr: 2.1: syntax error, unexpected '+' -stderr: ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' -syntax error -error: 2222 != 1 -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: 2.1: syntax error, unexpected '+' -1.14: memory exhausted -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1371: cat stderr +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1374: $PREPARSER ./calc input ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86033,7 +85848,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1370: cat stderr +stderr: +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: cat stderr +stderr: +./calc.at:1375: $PREPARSER ./calc /dev/null +stderr: +1.1: syntax error, unexpected end of file +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error, unexpected end of file ./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86044,35 +85868,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1375: cat stderr - | (#) + (#) = 2222 -./calc.at:1371: $PREPARSER ./calc input 
-input: - | (1 + 1) / (1 - 1) -./calc.at:1370: $PREPARSER ./calc input -stderr: -./calc.at:1375: $PREPARSER ./calc /dev/null -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -stderr: -1.11-17: error: null divisor -1.1: syntax error, unexpected end of file -./calc.at:1374: cat stderr -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error, unexpected end of file -stderr: -stderr: -input: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -1.11-17: error: null divisor - | (- *) + (1 2) = 1 -./calc.at:1374: $PREPARSER ./calc input ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86083,35 +85878,33 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: cat stderr -./calc.at:1370: cat stderr ./calc.at:1375: cat stderr -stderr: -syntax error -syntax error -error: 2222 != 1 -input: +./calc.at:1374: cat stderr input: - | (1 + #) = 1111 | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1375: $PREPARSER ./calc input -496. calc.at:1370: ./calc.at:1371: $PREPARSER ./calc input - ok -stderr: stderr: 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.1-46: error: 4444 != 1 -1.6: syntax error: invalid character: '#' ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: "$PERL" -pi -e 'use strict; +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1374: $PREPARSER ./calc input +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +stderr: +memory exhausted +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +memory exhausted +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86121,17 +85914,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.1-46: error: 4444 != 1 stderr: -1.6: syntax error: invalid character: '#' - -./calc.at:1375: "$PERL" -pi -e 'use strict; +./calc.at:1375: cat stderr +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86141,36 +85929,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1374: cat stderr -./calc.at:1371: cat stderr -./calc.at:1375: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1374: $PREPARSER ./calc input -stderr: input: -syntax error -syntax error -syntax error -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (!!) + (1 2) = 1 ./calc.at:1375: $PREPARSER ./calc input -input: stderr: 1.11: syntax error, unexpected number 1.1-16: error: 2222 != 1 -stderr: +./calc.at:1374: cat stderr ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -./calc.at:1371: $PREPARSER ./calc input -syntax error -syntax error -syntax error stderr: 1.11: syntax error, unexpected number 1.1-16: error: 2222 != 1 +input: + | (#) + (#) = 2222 +./calc.at:1374: $PREPARSER ./calc input stderr: -1.2: syntax error: invalid character: '#' +syntax error: invalid character: '#' +syntax error: invalid character: '#' ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86181,8 +85956,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1375: cat stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1375: $PREPARSER ./calc input ./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86193,43 +85974,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error: invalid character: '#' -./calc.at:1375: cat stderr -input: -./calc.at:1374: cat stderr -./calc.at:1371: cat stderr - | (- *) + (1 2) = 1 -./calc.at:1375: $PREPARSER ./calc input -input: -input: stderr: 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.12: syntax error, unexpected number 1.1-17: error: 2222 != 1 - | 1 + 2 * 3 + !+ ++ -./calc.at:1374: $PREPARSER ./calc input ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (1 + # + 1) = 1111 -./calc.at:1371: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -502. calc.at:1390: testing Calculator %glr-parser %locations ... -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
1.12: syntax error, unexpected number 1.1-17: error: 2222 != 1 -./calc.at:1390: mv calc.y.tmp calc.y - -stderr: -./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: -1.6: syntax error: invalid character: '#' -stderr: -stdout: +./calc.at:1374: cat stderr +input: ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86240,37 +85995,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./types.at:139: $PREPARSER ./test - | 1 + 2 * 3 + !- ++ -./calc.at:1371: cat stderr + | (1 + #) = 1111 ./calc.at:1374: $PREPARSER ./calc input stderr: +syntax error: invalid character: '#' +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1375: cat stderr stderr: -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1371: $PREPARSER ./calc input +syntax error: invalid character: '#' input: -stderr: | (* *) + (*) + (*) -stderr: ./calc.at:1375: $PREPARSER ./calc input -1.11-17: error: null divisor stderr: 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.11-17: error: null divisor ./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86281,8 +86021,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -450. types.at:139: ok -./calc.at:1371: cat stderr +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86295,30 +86038,24 @@ ' expout || exit 77 ./calc.at:1374: cat stderr ./calc.at:1375: cat stderr -497. 
calc.at:1371: ok - input: input: + | (# + 1) = 1111 +./calc.at:1374: $PREPARSER ./calc input | 1 + 2 * 3 + !+ ++ ./calc.at:1375: $PREPARSER ./calc input - | 1 + 2 * 3 + !* ++ -./calc.at:1374: $PREPARSER ./calc input stderr: +syntax error: invalid character: '#' +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -memory exhausted stderr: - -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error: invalid character: '#' stderr: -memory exhausted +./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: | 1 + 2 * 3 + !- ++ ./calc.at:1375: $PREPARSER ./calc input -stderr: -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86329,6 +86066,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: cat stderr +stderr: +input: ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86339,34 +86081,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1374: cat stderr -input: -./calc.at:1375: cat stderr - | (#) + (#) = 2222 + | (1 + # + 1) = 1111 ./calc.at:1374: $PREPARSER ./calc input stderr: syntax error: invalid character: '#' +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: cat stderr +stderr: syntax error: invalid character: '#' input: -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 + !* ++ ./calc.at:1375: $PREPARSER ./calc input stderr: -stderr: -503. calc.at:1391: testing Calculator %glr-parser %locations api.location.type={Span} ... -syntax error: invalid character: '#' -syntax error: invalid character: '#' -504. calc.at:1392: testing Calculator %glr-parser %name-prefix "calc" ... 
-./calc.at:1392: mv calc.y.tmp calc.y - -1.14: memory exhausted -./calc.at:1392: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1391: mv calc.y.tmp calc.y - -stderr: -./calc.at:1390: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1391: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y 1.14: memory exhausted ./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -86378,6 +86104,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.14: memory exhausted +./calc.at:1374: cat stderr ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86388,27 +86118,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1375: cat stderr -./calc.at:1374: cat stderr input: +./calc.at:1375: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1374: $PREPARSER ./calc input +stderr: +error: null divisor +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (1 + #) = 1111 | (#) + (#) = 2222 ./calc.at:1375: $PREPARSER ./calc input -./calc.at:1374: $PREPARSER ./calc input stderr: +error: null divisor stderr: -syntax error: invalid character: '#' -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -syntax error: invalid character: '#' 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' -./calc.at:1375: "$PERL" -pi -e 'use strict; +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86418,7 +86147,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1374: "$PERL" -pi -e 'use strict; +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86432,20 +86161,19 @@ ./calc.at:1374: cat stderr input: | (1 + #) = 1111 +input: + | 123 +./calc.at:1374: $PREPARSER ./calc --num input +stderr: ./calc.at:1375: $PREPARSER ./calc input stderr: -input: 1.6: syntax error: invalid character: '#' ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -./calc.at:1374: $PREPARSER ./calc input -stderr: stderr: -syntax error: invalid character: '#' ./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.6: syntax error: invalid character: '#' stderr: -syntax error: invalid character: '#' +./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86456,34 +86184,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1375: cat stderr input: -./calc.at:1374: cat stderr + | 1 + 2 * 3 +./calc.at:1374: $PREPARSER ./calc --num input +input: +stderr: +syntax error | (# + 1) = 1111 +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1375: $PREPARSER ./calc input -input: stderr: - | (1 + # + 1) = 1111 -./calc.at:1374: $PREPARSER ./calc input +stderr: 1.2: syntax error: invalid character: '#' -./calc.at:1392: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +syntax error ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: 1.2: syntax error: invalid character: '#' -./calc.at:1391: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -syntax error: invalid character: '#' -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86494,17 +86211,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1375: cat stderr input: -stderr: +./calc.at:1374: cat stderr | (1 + # + 1) = 1111 +stderr: ./calc.at:1375: $PREPARSER ./calc input -syntax error: invalid character: '#' +stdout: stderr: +./types.at:139: $PREPARSER ./test 1.6: syntax error: invalid character: '#' +input: ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: + | 1 + 2 * 3 +./calc.at:1374: $PREPARSER ./calc --exp input +stderr: 1.6: syntax error: invalid character: '#' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86515,34 +86252,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1375: cat stderr -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +498. 
calc.at:1374: ok input: | (1 + 1) / (1 - 1) ./calc.at:1375: $PREPARSER ./calc input -./calc.at:1374: cat stderr stderr: 1.11-17: error: null divisor ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: + 1.11-17: error: null divisor -input: - | (1 + 1) / (1 - 1) -./calc.at:1374: $PREPARSER ./calc input -stderr: -error: null divisor -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: null divisor ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86553,22 +86276,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1375: cat stderr -./calc.at:1374: cat stderr input: | 123 +./calc.at:1375: $PREPARSER ./calc --num input +stderr: +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: +input: stdout: + | 1 + 2 * 3 ./calc.at:1375: $PREPARSER ./calc --num input ./calc.at:1387: "$PERL" -ne ' chomp; @@ -86582,9 +86301,9 @@ )' calc.c stderr: +1.3: syntax error, unexpected '+', expecting end of file ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -86598,45 +86317,16 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1387: $PREPARSER ./calc input -./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -stderr: -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 123 stderr: -./calc.at:1374: $PREPARSER ./calc --num input -input: -input: - | 1 2 - | 1 + 2 * 3 ./calc.at:1387: $PREPARSER ./calc input -./calc.at:1375: $PREPARSER ./calc --num input -stderr: -syntax error -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: 1.3: syntax error, unexpected '+', expecting end of file stderr: -syntax error -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +505. calc.at:1393: testing Calculator %glr-parser api.prefix={calc} ... +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: mv calc.y.tmp calc.y + stderr: -./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.3: syntax error, unexpected '+', expecting end of file -./calc.at:1387: cat stderr +./calc.at:1393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86648,23 +86338,18 @@ }eg ' expout || exit 77 input: - | 1//2 -input: +stderr: +stdout: + | 1 2 ./calc.at:1387: $PREPARSER ./calc input - | 1 + 2 * 3 +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./calc.at:1374: $PREPARSER ./calc --num input syntax error ./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -stderr: -syntax error -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1375: cat stderr stderr: syntax error -input: ./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86675,31 +86360,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: | 1 + 2 * 3 ./calc.at:1375: $PREPARSER ./calc --exp input -stderr: ./calc.at:1387: cat stderr +stderr: input: ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | error + | 1//2 ./calc.at:1387: $PREPARSER ./calc input stderr: stderr: -syntax error ./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error ./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: syntax error -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 499. calc.at:1375: ok ./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -86711,23 +86387,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1374: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + ./calc.at:1387: cat stderr input: -input: - | 1 = 2 = 3 + | error ./calc.at:1387: $PREPARSER ./calc input - | 1 + 2 * 3 -./calc.at:1374: $PREPARSER ./calc --exp input +./calc.at:1393: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stderr: syntax error ./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error -stderr: -./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86738,57 +86409,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -498. calc.at:1374: ok - +506. calc.at:1394: testing Calculator %glr-parser %verbose ... +./calc.at:1394: mv calc.y.tmp calc.y ./calc.at:1387: cat stderr -505. calc.at:1393: testing Calculator %glr-parser api.prefix={calc} ... 
-./calc.at:1393: mv calc.y.tmp calc.y - +./calc.at:1394: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: -./calc.at:1393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y - | - | +1 + | 1 = 2 = 3 ./calc.at:1387: $PREPARSER ./calc input stderr: syntax error ./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -506. calc.at:1394: testing Calculator %glr-parser %verbose ... -./calc.at:1394: mv calc.y.tmp calc.y - -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1394: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1387: cat stderr -./calc.at:1387: $PREPARSER ./calc /dev/null -stderr: -syntax error -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error +stdout: stderr: -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1393: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stdout: +syntax error ./calc.at:1389: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -86800,11 +86437,10 @@ || /\t/ )' calc.c calc.h -./calc.at:1387: cat stderr -input: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1387: $PREPARSER ./calc input | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -86819,31 +86455,11 @@ | 2^2^3 = 256 | (2^2)^3 = 64 ./calc.at:1389: $PREPARSER ./calc input -stderr: +======== Testing with C++ standard flags: '' stderr: ./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -stderr: - | 1 2 -./calc.at:1389: $PREPARSER ./calc input -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -stderr: -syntax error -./calc.at:1394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -syntax error ./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86854,6 +86470,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: + | 1 2 +./calc.at:1389: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1387: cat stderr +stderr: ./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86864,24 +86490,85 @@ : 
"syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1387: cat stderr +stdout: +./calc.at:1390: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + input: - | (!!) + (1 2) = 1 + | + | +1 ./calc.at:1387: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1389: cat stderr +stdout: +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +stderr: +./calc.at:1391: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +./calc.at:1390: $PREPARSER ./calc input +syntax error +stderr: input: +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1//2 ./calc.at:1389: $PREPARSER ./calc input stderr: stderr: +input: syntax error ./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: "$PERL" -pi -e 'use strict; +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1391: $PREPARSER ./calc input +./calc.at:1394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS + | 1 2 +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86892,28 +86579,35 @@ }eg ' expout || exit 77 stderr: -./calc.at:1389: cat stderr -syntax error -error: 2222 != 1 -input: - | error -./calc.at:1389: $PREPARSER ./calc input stderr: -syntax error -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error syntax error -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.3: syntax error +input: +./calc.at:1387: cat stderr + | 1 2 +./calc.at:1391: $PREPARSER ./calc input +stderr: +./calc.at:1387: $PREPARSER ./calc /dev/null +1.3: syntax error +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1387: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86923,29 +86617,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1389: $PREPARSER ./calc input -stderr: syntax error -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1387: cat stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1387: $PREPARSER ./calc input stderr: -syntax error -syntax error -error: 2222 != 1 ./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error stderr: +./calc.at:1390: cat stderr syntax error -syntax error -error: 2222 != 1 -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1389: cat stderr +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86955,6 +86635,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +input: +stdout: +input: +./types.at:139: ./check + | 1//2 +./calc.at:1390: $PREPARSER ./calc input + | error +./calc.at:1389: $PREPARSER ./calc input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stderr: +./calc.at:1391: cat stderr +1.3: syntax error ./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -86965,26 +86659,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: cat stderr -input: -./calc.at:1387: cat stderr - | - | +1 -./calc.at:1389: $PREPARSER ./calc input -stderr: syntax error +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: syntax error - | (* *) + (*) + (*) -./calc.at:1387: $PREPARSER ./calc input stderr: -syntax error -syntax error -syntax error -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: "$PERL" -pi -e 'use strict; +1.3: syntax error +input: + | 1//2 +./calc.at:1391: $PREPARSER ./calc input +stderr: +./calc.at:1387: cat stderr +1.3: syntax error +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86994,17 +86682,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1389: cat stderr -syntax error -syntax error -syntax error -./calc.at:1389: $PREPARSER ./calc /dev/null -stderr: -syntax error -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error +1.3: syntax error +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1387: $PREPARSER ./calc input ./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87015,18 +86698,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1389: $PREPARSER ./calc input stderr: syntax error syntax error syntax error syntax error error: 4444 != 1 -./calc.at:1389: sed 
>&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: "$PERL" -pi -e 'use strict; +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: cat stderr +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87042,35 +86722,21 @@ syntax error syntax error error: 4444 != 1 -./calc.at:1387: cat stderr -./calc.at:1389: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1389: cat stderr input: + | error +./calc.at:1390: $PREPARSER ./calc input +stderr: input: - | (!!) + (1 2) = 1 +1.1: syntax error +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 ./calc.at:1389: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1387: $PREPARSER ./calc input -stderr: -syntax error -error: 2222 != 1 -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: cat stderr stderr: syntax error -error: 2222 != 1 stderr: -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87080,26 +86746,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.1: syntax error +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 + 2 * 3 + !- ++ -./calc.at:1387: $PREPARSER ./calc input -stderr: -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: cat stderr -stderr: -input: - | (- *) + (1 2) = 1 -./calc.at:1389: $PREPARSER ./calc input stderr: syntax error -syntax error -error: 2222 != 1 -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1391: $PREPARSER ./calc input stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1387: "$PERL" -pi -e 'use strict; +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87109,7 +86764,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.1: syntax error ./calc.at:1387: cat stderr +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error +input: ./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87120,24 +86780,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 + 2 * 3 + !* ++ +./calc.at:1390: cat stderr + | (!!) 
+ (1 2) = 1 ./calc.at:1387: $PREPARSER ./calc input -./calc.at:1389: cat stderr -stderr: -memory exhausted -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -memory exhausted -input: - | (* *) + (*) + (*) -./calc.at:1389: $PREPARSER ./calc input stderr: syntax error -syntax error -syntax error -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: "$PERL" -pi -e 'use strict; +error: 2222 != 1 +input: +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87147,16 +86798,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 = 2 = 3 +./calc.at:1390: $PREPARSER ./calc input stderr: +stderr: +./calc.at:1389: cat stderr syntax error -syntax error -syntax error -./calc.at:1387: cat stderr +error: 2222 != 1 +1.7: syntax error +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: input: - | (#) + (#) = 2222 -./calc.at:1387: $PREPARSER ./calc input +1.7: syntax error + | + | +1 +./calc.at:1389: $PREPARSER ./calc input +./calc.at:1391: cat stderr stderr: -./calc.at:1389: "$PERL" -pi -e 'use strict; +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +syntax error +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87166,15 +86827,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1389: cat stderr -stderr: -./calc.at:1387: "$PERL" -pi -e 'use strict; +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87184,62 +86837,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -./calc.at:1392: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -input: +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 + 2 * 3 + !+ ++ - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1389: $PREPARSER ./calc input -./calc.at:1392: $PREPARSER ./calc input + | 1 = 2 = 3 stderr: +./calc.at:1391: $PREPARSER ./calc input +syntax error stderr: -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1387: cat stderr +./calc.at:1390: cat stderr stderr: +1.7: syntax error input: -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -./calc.at:1392: $PREPARSER ./calc input -stderr: -stderr: -syntax error -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: - | (1 + #) = 1111 +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (- *) + (1 2) = 1 + | + | +1 +./calc.at:1390: $PREPARSER ./calc input ./calc.at:1387: $PREPARSER ./calc input -syntax error -input: -stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1389: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87249,29 +86877,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' stderr: -./calc.at:1392: cat stderr +syntax error +syntax error +error: 2222 != 1 +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./calc.at:1391: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -input: - | 1//2 -./calc.at:1392: $PREPARSER ./calc input +./calc.at:1389: cat stderr +syntax error +syntax error +error: 2222 != 1 +2.1: syntax error +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: cat stderr stderr: -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1389: $PREPARSER ./calc /dev/null +2.1: syntax error +stderr: +syntax error +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87282,8 +86909,7 @@ }eg ' expout || exit 77 syntax error -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: "$PERL" -pi -e 'use strict; +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87294,29 +86920,15 @@ }eg ' expout || exit 77 input: -stderr: -syntax error - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 + | +1 ./calc.at:1391: $PREPARSER ./calc input -stderr: -./calc.at:1389: cat stderr -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1387: cat stderr stderr: +2.1: syntax error +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87326,33 +86938,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 2 - | 1 + 2 * 3 + !* ++ -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1389: $PREPARSER ./calc input -stderr: -input: -1.3: syntax error -stderr: -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -memory exhausted -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1392: cat stderr stderr: - | (# + 1) = 1111 +./calc.at:1390: cat stderr + | (* *) + (*) + (*) ./calc.at:1387: $PREPARSER ./calc input -stderr: -1.3: syntax error -stderr: -syntax error: invalid character: '#' -./calc.at:1387: 
sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -memory exhausted -input: - | error -./calc.at:1392: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' +2.1: syntax error +./calc.at:1390: $PREPARSER ./calc /dev/null ./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87364,12 +86955,23 @@ }eg ' expout || exit 77 stderr: -syntax error -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: syntax error +syntax error +syntax error +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error +./calc.at:1389: cat stderr ./calc.at:1391: cat stderr -./calc.at:1389: "$PERL" -pi -e 'use strict; +stderr: +syntax error +syntax error +syntax error +./calc.at:1391: $PREPARSER ./calc /dev/null +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87379,11 +86981,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: input: - | 1//2 -./calc.at:1391: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1389: $PREPARSER ./calc input +1.1: syntax error +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1392: "$PERL" -pi -e 'use strict; +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87393,7 +87006,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1387: "$PERL" -pi -e 'use strict; +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1390: cat stderr +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87403,13 +87023,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.3: syntax error -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.3: syntax error -./calc.at:1392: cat stderr -./calc.at:1389: cat stderr -./calc.at:1391: "$PERL" -pi -e 'use strict; +input: +./calc.at:1387: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87419,43 +87037,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1387: cat stderr -input: - | 1 = 2 = 3 -input: +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 ./calc.at:1391: cat stderr input: -./calc.at:1392: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1389: $PREPARSER ./calc input - | (1 + # + 1) = 1111 +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ ./calc.at:1387: $PREPARSER ./calc input stderr: stderr: -syntax error -syntax error: invalid character: '#' -syntax error: invalid character: '#' input: -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | error -syntax error: invalid character: '#' +1.2: syntax error +1.18: 
syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1389: cat stderr ./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1391: $PREPARSER ./calc input stderr: -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -syntax error: invalid character: '#' -stderr: stderr: -1.1: syntax error +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 ./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -stderr: -stderr: -1.1: syntax error -syntax error -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87465,6 +87078,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: +input: + | (!!) + (1 2) = 1 +./calc.at:1389: $PREPARSER ./calc input +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1387: $PREPARSER ./calc input +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +stderr: +syntax error +error: 2222 != 1 +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: ./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87475,7 +87108,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1387: "$PERL" -pi -e 'use strict; +./calc.at:1390: cat stderr +syntax error +error: 2222 != 1 +input: +./calc.at:1391: cat stderr + | (!!) + (1 2) = 1 +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87485,9 +87125,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: cat stderr -./calc.at:1391: cat stderr -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87497,40 +87135,46 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (1 + #) = 1111 -./calc.at:1387: cat stderr -./calc.at:1389: $PREPARSER ./calc input -input: - | 1 = 2 = 3 stderr: -./calc.at:1391: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (1 + 1) / (1 - 1) -./calc.at:1387: $PREPARSER ./calc input + | (!!) 
+ (1 2) = 1 +./calc.at:1391: $PREPARSER ./calc input stderr: -./calc.at:1392: cat stderr -1.7: syntax error +1.11: syntax error +1.1-16: error: 2222 != 1 stderr: +./calc.at:1389: cat stderr +./calc.at:1387: cat stderr +1.11: syntax error +1.1-16: error: 2222 != 1 ./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' stderr: -error: null divisor -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error +1.1-16: error: 2222 != 1 input: +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1387: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -1.7: syntax error - | - | +1 -./calc.at:1392: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: $PREPARSER ./calc input +memory exhausted +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -error: null divisor -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87541,8 +87185,19 @@ }eg ' expout || exit 77 stderr: +memory exhausted syntax error -./calc.at:1391: "$PERL" -pi -e 'use strict; +syntax error +error: 2222 != 1 +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1391: cat stderr +syntax error +syntax error +error: 2222 != 1 +./calc.at:1390: cat stderr +input: +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87552,7 +87207,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1387: "$PERL" -pi -e 'use strict; + | (- *) + (1 2) = 1 +./calc.at:1391: $PREPARSER ./calc input +input: + | (- *) + (1 2) = 1 +stderr: +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87562,9 +87223,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +stderr: +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1387: cat stderr +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 ./calc.at:1389: cat stderr -./calc.at:1391: cat stderr -./calc.at:1392: "$PERL" -pi -e 'use strict; +input: +./calc.at:1390: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87575,58 +87264,36 @@ }eg ' expout || exit 77 input: -input: - | (# + 1) = 1111 -./calc.at:1387: cat stderr - | - | +1 -./calc.at:1389: $PREPARSER ./calc input -./calc.at:1391: $PREPARSER ./calc input -stderr: + | (#) + (#) = 2222 +./calc.at:1387: $PREPARSER ./calc input stderr: -2.1: syntax error + | (* *) + (*) + (*) +./calc.at:1389: $PREPARSER ./calc input syntax error: invalid character: '#' -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1392: cat stderr -stderr: -stdout: -500. calc.at:1387: ok +syntax error: invalid character: '#' +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error +syntax error +syntax error +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1390: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - syntax error: invalid character: '#' -./calc.at:1392: $PREPARSER ./calc /dev/null +syntax error: invalid character: '#' stderr: -2.1: syntax error +./calc.at:1390: cat stderr syntax error -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +./calc.at:1391: cat stderr input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 +input: + | (* *) + (*) + (*) ./calc.at:1390: $PREPARSER ./calc input -./calc.at:1391: "$PERL" -pi -e 'use strict; + | (* *) + (*) + (*) +./calc.at:1391: $PREPARSER ./calc input +stderr: +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87636,9 +87303,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -stderr: -stderr: ./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87649,19 +87313,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error +1.2: syntax error +1.10: syntax error +1.16: syntax error ./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: - | 1 2 -./calc.at:1390: $PREPARSER ./calc input -./calc.at:1389: cat stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error stderr: -1.3: syntax error -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error stderr: -./calc.at:1392: "$PERL" -pi -e 'use strict; +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1389: cat stderr +./calc.at:1387: cat stderr +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87671,16 +87342,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.3: syntax error - | (1 + # + 1) = 1111 +input: +input: + | 1 + 2 * 3 + !+ ++ ./calc.at:1389: $PREPARSER ./calc input -./calc.at:1391: cat stderr 
-stderr: -syntax error: invalid character: '#' -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1391: $PREPARSER ./calc /dev/null -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87690,18 +87356,34 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (1 + #) = 1111 +./calc.at:1387: $PREPARSER ./calc input +stderr: +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: syntax error: invalid character: '#' +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: cat stderr +stderr: stderr: -./calc.at:1392: cat stderr -1.1: syntax error -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1390: cat stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1392: $PREPARSER ./calc input +syntax error: invalid character: '#' +input: +./calc.at:1391: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1389: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1390: $PREPARSER ./calc input stderr: -1.1: syntax error -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 + 2 * 3 + !+ ++ +stderr: +./calc.at:1391: $PREPARSER ./calc input +stderr: +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87712,17 +87394,13 @@ }eg ' expout || exit 77 stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 input: -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ ./calc.at:1390: $PREPARSER ./calc input stderr: -./calc.at:1391: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87732,20 +87410,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -1.3: syntax error ./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: cat stderr -stderr: -1.3: syntax error input: - | (1 + 1) / (1 - 1) -./calc.at:1392: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1387: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1391: $PREPARSER ./calc input +stderr: +./calc.at:1389: cat stderr +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87755,9 +87429,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: +stderr: + | (# + 1) = 1111 +./calc.at:1387: $PREPARSER ./calc input +input: +stderr: + | 1 + 2 * 3 + !* ++ ./calc.at:1389: $PREPARSER ./calc input -./calc.at:1391: cat stderr -./calc.at:1390: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87767,42 +87449,29 @@ : "syntax 
error, unexpected $unexp"; }eg ' expout || exit 77 -507. calc.at:1395: testing Calculator %glr-parser parse.error=verbose ... -./calc.at:1395: mv calc.y.tmp calc.y - stderr: -error: null divisor -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -input: stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1391: $PREPARSER ./calc input -error: null divisor -./calc.at:1392: cat stderr +syntax error: invalid character: '#' +memory exhausted +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +memory exhausted ./calc.at:1390: cat stderr -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: cat stderr input: -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 - | error -./calc.at:1390: $PREPARSER ./calc input +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 + !* ++ input: -stderr: - | (!!) + (1 2) = 1 -1.1: syntax error -./calc.at:1392: $PREPARSER ./calc input -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: $PREPARSER ./calc input ./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87813,12 +87482,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 + 2 * 3 + !* ++ stderr: +./calc.at:1391: $PREPARSER ./calc input +1.14: memory exhausted stderr: -syntax error -error: 2222 != 1 -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1391: "$PERL" -pi -e 'use strict; +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.14: memory exhausted +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: cat stderr +stderr: +1.14: memory exhausted +input: +stderr: +1.14: memory exhausted +./calc.at:1389: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87828,11 +87509,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.1: syntax error stderr: -syntax error -error: 2222 != 1 -./calc.at:1390: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87842,11 +87523,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: cat stderr +syntax error: invalid character: '#' +./calc.at:1390: cat stderr ./calc.at:1391: cat stderr -501. 
calc.at:1389: input: - ok -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87856,34 +87536,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (!!) + (1 2) = 1 +input: + | (#) + (#) = 2222 +input: + | (#) + (#) = 2222 +./calc.at:1389: $PREPARSER ./calc input +input: + | (#) + (#) = 2222 ./calc.at:1391: $PREPARSER ./calc input -./calc.at:1390: cat stderr +./calc.at:1390: $PREPARSER ./calc input stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -./calc.at:1392: cat stderr -1.11: syntax error -1.1-16: error: 2222 != 1 - | 1 = 2 = 3 - -./calc.at:1390: $PREPARSER ./calc input stderr: -input: -1.7: syntax error +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +syntax error: invalid character: '#' +syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' ./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1392: $PREPARSER ./calc input +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -1.7: syntax error -syntax error -syntax error -error: 2222 != 1 -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +syntax error: invalid character: '#' +syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1387: cat stderr ./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87904,13 +87587,33 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: + | (1 + 1) / (1 - 1) +./calc.at:1387: $PREPARSER ./calc input stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1390: cat stderr +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +error: null divisor +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: null divisor ./calc.at:1391: cat stderr -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1390: cat stderr +./calc.at:1389: cat stderr +input: + | (1 + #) = 1111 +input: + | (1 + #) = 1111 +./calc.at:1389: $PREPARSER ./calc input +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87921,31 +87624,25 @@ }eg ' expout || exit 77 input: - | - | +1 ./calc.at:1390: $PREPARSER ./calc input -input: - | (- *) + (1 2) = 1 +stderr: + | (1 + #) = 1111 ./calc.at:1391: $PREPARSER ./calc input stderr: -2.1: syntax error +syntax error: invalid character: '#' +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.6: syntax error: invalid character: '#' ./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.6: syntax error: invalid character: '#' stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' stderr: -./calc.at:1392: cat stderr -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -2.1: syntax error -input: - | (* *) + (*) + (*) -./calc.at:1392: $PREPARSER ./calc input -./calc.at:1391: "$PERL" -pi -e 'use strict; +1.6: syntax error: invalid character: '#' +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87955,13 +87652,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -syntax error -syntax error -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -508. calc.at:1397: testing Calculator %glr-parser api.pure %locations ... -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1387: cat stderr +500. calc.at:1387: ok +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87971,17 +87664,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1397: mv calc.y.tmp calc.y - -./calc.at:1395: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -stderr: -syntax error -syntax error -syntax error -./calc.at:1397: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1391: cat stderr -input: -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1390: cat stderr +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87991,26 +87675,53 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (* *) + (*) + (*) -./calc.at:1390: cat stderr -./calc.at:1391: $PREPARSER ./calc input stderr: -./calc.at:1390: $PREPARSER ./calc /dev/null -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1392: cat stderr +input: +stdout: + | (# + 1) = 1111 +./calc.at:1392: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. 
+ (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1389: cat stderr stderr: -1.1: syntax error + +1.2: syntax error: invalid character: '#' ./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (# + 1) = 1111 +./calc.at:1389: $PREPARSER ./calc input stderr: -1.1: syntax error -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error +./calc.at:1391: cat stderr +1.2: syntax error: invalid character: '#' +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1392: $PREPARSER ./calc input input: +stderr: +syntax error: invalid character: '#' +stderr: +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88021,9 +87732,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !+ ++ + | (# + 1) = 1111 +./calc.at:1391: $PREPARSER ./calc input +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +input: +stderr: +1.2: syntax error: invalid character: '#' + | 1 2 ./calc.at:1392: $PREPARSER ./calc input +./calc.at:1390: cat stderr stderr: +syntax error ./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -88036,25 +87761,9 @@ }eg ' expout || exit 77 stderr: -./calc.at:1391: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -input: -./calc.at:1391: $PREPARSER ./calc input - | 1 + 2 * 3 + !- ++ -./calc.at:1392: $PREPARSER ./calc input -./calc.at:1390: cat stderr -stderr: -stderr: -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: - | 1 + 2 * 3 + !- ++ +syntax error input: -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88064,27 +87773,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (1 + # + 1) = 1111 ./calc.at:1390: $PREPARSER ./calc input stderr: -./calc.at:1397: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -stderr: -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 +1.6: syntax error: invalid character: '#' ./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: cat stderr stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -stderr: -./calc.at:1392: cat stderr -./calc.at:1391: "$PERL" -pi -e 'use strict; +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88094,6 +87790,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.6: syntax 
error: invalid character: '#' +./calc.at:1389: cat stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1391: $PREPARSER ./calc input ./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88104,30 +87805,62 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: cat stderr input: - | 1 + 2 * 3 + !* ++ + | (1 + # + 1) = 1111 +./calc.at:1389: $PREPARSER ./calc input +stderr: +1.6: syntax error: invalid character: '#' +stderr: +syntax error: invalid character: '#' +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +507. calc.at:1395: testing Calculator %glr-parser parse.error=verbose ... +./calc.at:1390: cat stderr + | 1//2 +syntax error: invalid character: '#' ./calc.at:1392: $PREPARSER ./calc input +./calc.at:1395: mv calc.y.tmp calc.y + stderr: -memory exhausted -./calc.at:1391: cat stderr +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error ./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: cat stderr -input: +./calc.at:1395: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -memory exhausted - | (!!) + (1 2) = 1 +input: + | (1 + 1) / (1 - 1) ./calc.at:1390: $PREPARSER ./calc input +syntax error stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 +1.11-17: error: null divisor ./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1391: $PREPARSER ./calc input -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 +./calc.at:1391: cat stderr stderr: +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.11-17: error: null divisor ./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88138,9 +87871,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.14: memory exhausted -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1391: $PREPARSER ./calc input stderr: +1.11-17: error: null divisor +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88151,8 +87887,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.14: memory exhausted +./calc.at:1389: cat stderr ./calc.at:1392: cat stderr +stderr: +1.11-17: error: null divisor +input: +./calc.at:1390: cat stderr ./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88163,34 +87903,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1390: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1392: $PREPARSER ./calc input -stderr: -input: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1390: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1389: $PREPARSER ./calc input +502. calc.at:1390: stderr: +error: null divisor +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok stderr: +error: null divisor ./calc.at:1391: cat stderr -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 input: - | (#) + (#) = 2222 -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1390: "$PERL" -pi -e 'use strict; + | error +./calc.at:1392: $PREPARSER ./calc input +503. calc.at:1391: ok + +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88201,9 +87928,12 @@ }eg ' expout || exit 77 stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error + +./calc.at:1389: cat stderr ./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88214,13 +87944,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +501. calc.at:1389: ok +./calc.at:1392: cat stderr + +input: + | 1 = 2 = 3 +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1395: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1390: cat stderr +syntax error +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -./calc.at:1391: "$PERL" -pi -e 'use strict; +syntax error +508. calc.at:1397: testing Calculator %glr-parser api.pure %locations ... 
+./calc.at:1397: mv calc.y.tmp calc.y + +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88230,55 +87969,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -./calc.at:1394: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c +./calc.at:1397: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +509. calc.at:1398: testing Calculator %glr-parser parse.error=verbose %locations ... +./calc.at:1398: mv calc.y.tmp calc.y - | (* *) + (*) + (*) -./calc.at:1390: $PREPARSER ./calc input ./calc.at:1392: cat stderr -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1391: cat stderr -input: +./calc.at:1398: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1394: $PREPARSER ./calc input -1.2: syntax error -1.10: syntax error -1.16: syntax error - | (1 + #) = 1111 + | +1 ./calc.at:1392: $PREPARSER ./calc input stderr: -stderr: -syntax error: invalid character: '#' +510. calc.at:1400: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose ... +syntax error ./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1400: mv calc.y.tmp calc.y + +stderr: +syntax error +./calc.at:1400: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88288,24 +87998,42 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: +./calc.at:1392: cat stderr +./calc.at:1392: $PREPARSER ./calc /dev/null stderr: +syntax error +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' - | (1 + #) = 1111 -./calc.at:1391: $PREPARSER ./calc input +syntax error +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1392: cat stderr input: -stderr: -1.6: syntax error: invalid character: '#' - | 1 2 -./calc.at:1394: $PREPARSER ./calc input -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1397: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stderr: syntax error -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 ./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88316,9 +88044,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1400: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1392: cat stderr +input: + | (!!) + (1 2) = 1 +./calc.at:1392: $PREPARSER ./calc input +stderr: syntax error -1.6: syntax error: invalid character: '#' -./calc.at:1391: "$PERL" -pi -e 'use strict; +error: 2222 != 1 +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +error: 2222 != 1 +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88328,15 +88066,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1390: $PREPARSER ./calc input ./calc.at:1392: cat stderr -stderr: input: -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -./calc.at:1394: "$PERL" -pi -e 'use strict; + | (- *) + (1 2) = 1 +./calc.at:1392: $PREPARSER ./calc input +stderr: +syntax error +syntax error +error: 2222 != 1 +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +syntax error +error: 2222 != 1 +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88346,15 +88089,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1392: cat stderr +input: stderr: -stderr: + | (* *) + (*) + (*) ./calc.at:1392: $PREPARSER ./calc input stderr: stdout: -syntax error: invalid character: '#' +syntax error +syntax error +syntax error ./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1391: cat stderr ./calc.at:1393: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -88366,22 +88111,11 @@ || /\t/ )' calc.c - | 1 + 2 * 3 + !- ++ -./calc.at:1390: $PREPARSER ./calc input -input: - | (# + 1) = 1111 -stderr: -./calc.at:1391: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1394: cat stderr -1.2: syntax error: invalid character: '#' -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error +syntax error +syntax error input: -stderr: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -88396,11 +88130,8 @@ | 2^2^3 = 256 | (2^2)^3 = 64 ./calc.at:1393: 
$PREPARSER ./calc input -1.2: syntax error: invalid character: '#' stderr: -input: ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 ./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88411,8 +88142,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1394: $PREPARSER ./calc input -./calc.at:1390: "$PERL" -pi -e 'use strict; +stderr: +input: + | 1 2 +./calc.at:1393: $PREPARSER ./calc input +stderr: +./calc.at:1392: cat stderr +syntax error +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1392: $PREPARSER ./calc input +stderr: +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88422,9 +88168,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1392: $PREPARSER ./calc input stderr: stderr: -./calc.at:1391: "$PERL" -pi -e 'use strict; +stdout: +./types.at:139: $PREPARSER ./test +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: cat stderr +stderr: +stderr: +stdout: +stderr: +./types.at:139: $PREPARSER ./test +input: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +./calc.at:1393: $PREPARSER ./calc input +stderr: +======== Testing with C++ standard flags: '' +stderr: +syntax error +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +======== Testing with C++ standard flags: '' +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88434,30 +88206,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1 2 -syntax error -./calc.at:1393: $PREPARSER ./calc input +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./calc.at:1392: cat stderr -stderr: -./calc.at:1391: cat stderr -./calc.at:1390: cat stderr -syntax error -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | (1 + # + 1) = 1111 -input: -input: -syntax error -./calc.at:1392: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -./calc.at:1391: $PREPARSER ./calc input - | 1 + 2 * 3 + !* ++ -./calc.at:1394: "$PERL" -pi -e 'use strict; +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88467,22 +88218,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1393: cat stderr stderr: -./calc.at:1390: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -1.6: syntax error: invalid character: '#' +memory exhausted ./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.14: memory exhausted +input: stderr: + | error +./calc.at:1393: 
$PREPARSER ./calc input +memory exhausted stderr: -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -syntax error: invalid character: '#' +syntax error +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.14: memory exhausted +syntax error ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88493,8 +88245,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1394: cat stderr -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88504,7 +88255,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1391: "$PERL" -pi -e 'use strict; +./calc.at:1393: cat stderr +./calc.at:1392: cat stderr +input: +input: + | (#) + (#) = 2222 + | 1 = 2 = 3 +./calc.at:1393: $PREPARSER ./calc input +./calc.at:1392: $PREPARSER ./calc input +stderr: +stdout: +stderr: +stderr: +./types.at:139: ./check +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +syntax error +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +stderr: +syntax error +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88514,10 +88293,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | error -./calc.at:1394: $PREPARSER ./calc input -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88527,46 +88303,57 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1391: cat stderr -./calc.at:1390: cat stderr -syntax error -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1392: cat stderr ./calc.at:1393: cat stderr input: stderr: +stdout: input: -syntax error - | (1 + 1) / (1 - 1) -./calc.at:1391: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1390: $PREPARSER ./calc input -input: -stderr: -input: -1.11-17: error: null divisor -stderr: -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 + | + | +1 ./calc.at:1393: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | (1 + 1) / (1 - 1) -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1394: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + + | (1 + #) = 1111 ./calc.at:1392: $PREPARSER ./calc input stderr: -1.11-17: error: null divisor -syntax error stderr: -error: null divisor +syntax error: invalid character: '#' ./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +syntax error ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1394: "$PERL" -pi -e 'use strict; +stderr: +syntax error +input: +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +syntax error: invalid character: '#' +./calc.at:1394: $PREPARSER ./calc input +stderr: +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88576,10 +88363,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -stderr: -./calc.at:1391: "$PERL" -pi -e 'use strict; +input: +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88589,8 +88374,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -error: null divisor -./calc.at:1390: "$PERL" -pi -e 'use strict; + | 1 2 +./calc.at:1394: $PREPARSER ./calc input +stderr: +syntax error +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: cat stderr +stderr: +syntax error +./calc.at:1392: cat stderr +./calc.at:1393: $PREPARSER ./calc /dev/null +stderr: +syntax error +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (# + 1) = 1111 +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88600,8 +88401,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1391: cat stderr +stderr: +syntax error +stderr: +syntax error: invalid character: '#' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1394: cat stderr +syntax error: invalid character: '#' ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88612,6 +88420,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: + | 1//2 +./calc.at:1394: $PREPARSER ./calc input +stderr: ./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88622,34 +88434,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1390: cat stderr -input: -503. calc.at:1391: ok - | 1 = 2 = 3 -./calc.at:1394: $PREPARSER ./calc input -stderr: +./calc.at:1393: cat stderr syntax error ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: syntax error - - | (1 + #) = 1111 -./calc.at:1390: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1392: cat stderr -./calc.at:1393: cat stderr input: -stderr: -504. 
calc.at:1392: ok -1.6: syntax error: invalid character: '#' - | error +./calc.at:1392: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1393: $PREPARSER ./calc input stderr: +input: +syntax error +syntax error +syntax error syntax error +error: 4444 != 1 ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88660,27 +88462,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1390: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1392: $PREPARSER ./calc input +stderr: stderr: +syntax error: invalid character: '#' +syntax error syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' ./calc.at:1394: cat stderr -./calc.at:1390: cat stderr - -input: - | - | +1 -input: - | (# + 1) = 1111 -./calc.at:1390: $PREPARSER ./calc input ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88691,19 +88485,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: + | error ./calc.at:1394: $PREPARSER ./calc input stderr: -stderr: -1.2: syntax error: invalid character: '#' -syntax error -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1393: cat stderr -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88713,16 +88499,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +syntax error +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1393: cat stderr input: - | 1 = 2 = 3 +./calc.at:1392: cat stderr + | (!!) + (1 2) = 1 ./calc.at:1393: $PREPARSER ./calc input stderr: -509. calc.at:1398: testing Calculator %glr-parser parse.error=verbose %locations ... 
syntax error +error: 2222 != 1 ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: mv calc.y.tmp calc.y - -./calc.at:1390: cat stderr +input: ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88733,16 +88523,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (1 + 1) / (1 - 1) stderr: -./calc.at:1398: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1392: $PREPARSER ./calc input syntax error +error: 2222 != 1 +stderr: +stdout: +stderr: +./types.at:139: $PREPARSER ./test +error: null divisor +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1394: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1390: $PREPARSER ./calc input stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: null divisor ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88753,16 +88548,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1394: $PREPARSER ./calc /dev/null +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +======== Testing with C++ standard flags: '' + | 1 = 2 = 3 +./calc.at:1394: $PREPARSER ./calc input +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stderr: syntax error -1.6: syntax error: invalid character: '#' ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: cat stderr -stderr: -syntax error -./calc.at:1390: "$PERL" -pi -e 'use strict; +stdout: +./types.at:139: $PREPARSER ./test +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88772,17 +88570,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -510. calc.at:1400: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose ... -./calc.at:1400: mv calc.y.tmp calc.y - +stderr: +stderr: +./calc.at:1393: cat stderr +syntax error +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | - | +1 +======== Testing with C++ standard flags: '' +./calc.at:1392: cat stderr + | (- *) + (1 2) = 1 ./calc.at:1393: $PREPARSER ./calc input +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: syntax error -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1400: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +syntax error +error: 2222 != 1 ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88793,23 +88595,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +504. 
calc.at:1392: ok stderr: syntax error -./calc.at:1390: cat stderr +syntax error +error: 2222 != 1 ./calc.at:1394: cat stderr + input: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - | (1 + 1) / (1 - 1) -./calc.at:1394: $PREPARSER ./calc input -./calc.at:1390: $PREPARSER ./calc input -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -stderr: + | + | +1 ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88820,19 +88616,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.11-17: error: null divisor -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1394: $PREPARSER ./calc input stderr: -1.11-17: error: null divisor -syntax error -syntax error syntax error +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: syntax error -error: 4444 != 1 ./calc.at:1393: cat stderr -./calc.at:1390: "$PERL" -pi -e 'use strict; +input: + | (* *) + (*) + (*) +./calc.at:1393: $PREPARSER ./calc input +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88842,12 +88636,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1393: $PREPARSER ./calc /dev/null -stderr: stderr: syntax error +syntax error +syntax error ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1394: "$PERL" -pi -e 'use strict; +stderr: +syntax error +syntax error +syntax error +./calc.at:1394: cat stderr +./calc.at:1394: $PREPARSER ./calc /dev/null +stderr: +syntax error +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88857,17 +88661,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -./calc.at:1390: cat stderr stderr: -./types.at:139: $PREPARSER ./test syntax error -stderr: -502. calc.at:1390: ok -./calc.at:1394: cat stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1393: "$PERL" -pi -e 'use strict; +stdout: +511. calc.at:1401: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose api.pure ... +./calc.at:1401: mv calc.y.tmp calc.y + +./types.at:139: ./check +./calc.at:1393: cat stderr +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88877,24 +88679,40 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1401: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: -452. types.at:139: ok - | (!!) 
+ (1 2) = 1 +stderr: + | 1 + 2 * 3 + !+ ++ +stdout: +./calc.at:1393: $PREPARSER ./calc input +./types.at:139: ./check +stderr: +./calc.at:1394: cat stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1394: $PREPARSER ./calc input stderr: - syntax error -error: 2222 != 1 +syntax error +syntax error +syntax error +error: 4444 != 1 ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1393: cat stderr syntax error -error: 2222 != 1 - +syntax error +syntax error +syntax error +error: 4444 != 1 input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | 1 + 2 * 3 + !- ++ ./calc.at:1393: $PREPARSER ./calc input stderr: +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88905,21 +88723,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1400: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS ./calc.at:1394: cat stderr -input: ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88930,22 +88735,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (- *) + (1 2) = 1 +input: + | (!!) + (1 2) = 1 ./calc.at:1394: $PREPARSER ./calc input stderr: syntax error -syntax error error: 2222 != 1 ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./calc.at:1393: cat stderr -syntax error syntax error error: 2222 != 1 +./calc.at:1393: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS input: - | (!!) + (1 2) = 1 -./calc.at:1393: $PREPARSER ./calc input -stderr: ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88956,22 +88759,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -error: 2222 != 1 + | 1 + 2 * 3 + !* ++ +./calc.at:1393: $PREPARSER ./calc input +stderr: +memory exhausted ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -511. calc.at:1401: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose api.pure ... -syntax error -error: 2222 != 1 -./calc.at:1401: mv calc.y.tmp calc.y - +memory exhausted ./calc.at:1394: cat stderr -./calc.at:1401: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -512. calc.at:1402: testing Calculator %glr-parser parse.error=detailed %locations %header %name-prefix "calc" %verbose ... 
input: -./calc.at:1402: mv calc.y.tmp calc.y - - | (* *) + (*) + (*) + | (- *) + (1 2) = 1 +./calc.at:1401: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS ./calc.at:1394: $PREPARSER ./calc input ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -88986,21 +88784,19 @@ stderr: syntax error syntax error -syntax error +error: 2222 != 1 ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1402: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: syntax error syntax error -syntax error +error: 2222 != 1 ./calc.at:1393: cat stderr input: - | (- *) + (1 2) = 1 + | (#) + (#) = 2222 ./calc.at:1393: $PREPARSER ./calc input stderr: -syntax error -syntax error -error: 2222 != 1 +syntax error: invalid character: '#' +syntax error: invalid character: '#' ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -89013,10 +88809,10 @@ }eg ' expout || exit 77 stderr: -syntax error -syntax error -error: 2222 != 1 +syntax error: invalid character: '#' +syntax error: invalid character: '#' ./calc.at:1394: cat stderr +input: ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89027,31 +88823,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1393: cat stderr -input: -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1394: $PREPARSER ./calc input | (* *) + (*) + (*) -./calc.at:1393: $PREPARSER ./calc input -stderr: +./calc.at:1394: $PREPARSER ./calc input stderr: -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error syntax error syntax error -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1393: cat stderr syntax error syntax error syntax error -stderr: input: - | 1 + 2 * 3 + !- ++ -./calc.at:1394: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1393: $PREPARSER ./calc input stderr: -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89062,8 +88852,9 @@ }eg ' expout || exit 77 stderr: -./calc.at:1393: cat stderr -./calc.at:1394: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +./calc.at:1394: cat stderr +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89075,28 +88866,26 @@ ' expout || exit 77 input: | 1 + 2 * 3 + !+ ++ -./calc.at:1393: $PREPARSER ./calc input +./calc.at:1394: $PREPARSER ./calc input stderr: -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: cat stderr stderr: input: +input: | 1 + 2 * 3 + !- ++ +./calc.at:1394: $PREPARSER ./calc input + | (# + 1) = 1111 ./calc.at:1393: $PREPARSER ./calc input -./calc.at:1402: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS 
-./calc.at:1401: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -stderr: -./calc.at:1394: cat stderr -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1394: $PREPARSER ./calc input stderr: -memory exhausted ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -memory exhausted -./calc.at:1393: "$PERL" -pi -e 'use strict; +stderr: +syntax error: invalid character: '#' +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89106,7 +88895,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1394: "$PERL" -pi -e 'use strict; +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89116,24 +88905,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1394: cat stderr ./calc.at:1393: cat stderr input: | 1 + 2 * 3 + !* ++ -./calc.at:1393: $PREPARSER ./calc input +./calc.at:1394: $PREPARSER ./calc input stderr: -./calc.at:1394: cat stderr +input: memory exhausted -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1393: $PREPARSER ./calc input stderr: -memory exhausted -input: - | (#) + (#) = 2222 -./calc.at:1394: $PREPARSER ./calc input stderr: +memory exhausted syntax error: invalid character: '#' +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: syntax error: invalid character: '#' -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: "$PERL" -pi -e 'use strict; +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89143,11 +88933,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1393: cat stderr -./calc.at:1394: "$PERL" -pi -e 'use strict; +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89157,25 +88943,33 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1394: cat stderr input: +./calc.at:1393: cat stderr | (#) + (#) = 2222 -./calc.at:1393: $PREPARSER ./calc input +./calc.at:1394: $PREPARSER ./calc input stderr: -./calc.at:1394: cat stderr syntax error: invalid character: '#' syntax error: invalid character: '#' -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +stderr: +stdout: +./calc.at:1393: $PREPARSER ./calc input stderr: syntax error: invalid character: '#' syntax error: invalid character: '#' -input: - | (1 + #) = 1111 -./calc.at:1394: $PREPARSER ./calc input +./types.at:139: $PREPARSER ./test stderr: -syntax error: invalid character: '#' -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: null divisor stderr: -./calc.at:1393: "$PERL" -pi -e 'use strict; +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./types.at:139: sed 
>&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: null divisor +======== Testing with C++ standard flags: '' +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89185,9 +88979,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1393: cat stderr -./calc.at:1394: "$PERL" -pi -e 'use strict; +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89197,34 +88990,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (1 + #) = 1111 -./calc.at:1393: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' ./calc.at:1394: cat stderr -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' +./calc.at:1393: cat stderr input: - | (# + 1) = 1111 + | (1 + #) = 1111 ./calc.at:1394: $PREPARSER ./calc input stderr: syntax error: invalid character: '#' ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1393: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +505. calc.at:1393: ok syntax error: invalid character: '#' -./calc.at:1393: cat stderr ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89235,38 +89011,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (# + 1) = 1111 -./calc.at:1393: $PREPARSER ./calc input -stderr: + ./calc.at:1394: cat stderr -syntax error: invalid character: '#' -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: -syntax error: invalid character: '#' - | (1 + # + 1) = 1111 + | (# + 1) = 1111 ./calc.at:1394: $PREPARSER ./calc input stderr: +stderr: syntax error: invalid character: '#' ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: ./check stderr: syntax error: invalid character: '#' -./calc.at:1393: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1393: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1393: $PREPARSER ./calc input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +512. calc.at:1402: testing Calculator %glr-parser parse.error=detailed %locations %header %name-prefix "calc" %verbose ... 
stderr: +stdout: ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89277,29 +89038,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1402: mv calc.y.tmp calc.y + +./types.at:139: $PREPARSER ./test +stderr: +./calc.at:1402: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./calc.at:1394: cat stderr -syntax error: invalid character: '#' -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (1 + 1) / (1 - 1) -stderr: + | (1 + # + 1) = 1111 ./calc.at:1394: $PREPARSER ./calc input -syntax error: invalid character: '#' stderr: -error: null divisor +syntax error: invalid character: '#' ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -error: null divisor -./calc.at:1393: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +syntax error: invalid character: '#' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89310,18 +89066,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1393: cat stderr +./calc.at:1394: cat stderr input: | (1 + 1) / (1 - 1) -./calc.at:1393: $PREPARSER ./calc input +./calc.at:1394: $PREPARSER ./calc input stderr: error: null divisor -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1394: cat stderr +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: error: null divisor -506. calc.at:1394: ok -./calc.at:1393: "$PERL" -pi -e 'use strict; +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89331,15 +89085,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -./calc.at:1393: cat stderr -505. calc.at:1393: ok - -513. calc.at:1403: testing Calculator %glr-parser parse.error=verbose %locations %header %name-prefix "calc" %verbose ... -./calc.at:1403: mv calc.y.tmp calc.y - stderr: -./calc.at:1403: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stdout: ./calc.at:1395: "$PERL" -ne ' chomp; @@ -89352,7 +89098,13 @@ || /\t/ )' calc.c +./calc.at:1394: cat stderr +./calc.at:1402: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +stderr: +stdout: input: +506. calc.at:1394: ok +./types.at:139: ./check | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -89367,20 +89119,18 @@ | 2^2^3 = 256 | (2^2)^3 = 64 ./calc.at:1395: $PREPARSER ./calc input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: + input: | 1 2 -514. 
calc.at:1405: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose ... -./calc.at:1405: mv calc.y.tmp calc.y - ./calc.at:1395: $PREPARSER ./calc input stderr: syntax error, unexpected number ./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1405: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y syntax error, unexpected number ./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -89397,43 +89147,10 @@ | 1//2 ./calc.at:1395: $PREPARSER ./calc input stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: stderr: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1403: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1395: cat stderr -input: - | error -./calc.at:1395: $PREPARSER ./calc input -stderr: -syntax error, unexpected invalid token ./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected invalid token -./calc.at:1405: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -stderr: -stdout: -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1397: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -89445,14 +89162,9 @@ || /\t/ )' calc.c -./calc.at:1395: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1395: $PREPARSER ./calc input -input: stderr: -syntax error, unexpected '=' -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -89468,10 +89180,6 @@ | (2^2)^3 = 64 ./calc.at:1397: $PREPARSER ./calc input stderr: -stderr: -syntax error, unexpected '=' -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89482,24 +89190,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 2 +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1395: cat stderr input: + | 1 2 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./calc.at:1397: $PREPARSER ./calc input - | - | +1 -./calc.at:1395: $PREPARSER ./calc input -stderr: stderr: +input: 1.3: syntax error ./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected '+' -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +513. 
calc.at:1403: testing Calculator %glr-parser parse.error=verbose %locations %header %name-prefix "calc" %verbose ... + | error +./calc.at:1395: $PREPARSER ./calc input +stderr: stderr: +syntax error, unexpected invalid token 1.3: syntax error +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error, unexpected '+' +syntax error, unexpected invalid token ./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89510,6 +89221,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1403: mv calc.y.tmp calc.y + ./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89520,23 +89233,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: cat stderr -./calc.at:1395: $PREPARSER ./calc /dev/null ./calc.at:1397: cat stderr -stderr: -syntax error, unexpected end of input -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1403: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1395: cat stderr input: -syntax error, unexpected end of input | 1//2 ./calc.at:1397: $PREPARSER ./calc input +input: stderr: 1.3: syntax error ./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1395: $PREPARSER ./calc input +stderr: +syntax error, unexpected '=' +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.3: syntax error -./calc.at:1395: "$PERL" -pi -e 'use strict; +stderr: +syntax error, unexpected '=' +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89546,7 +89262,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1397: "$PERL" -pi -e 'use strict; +./calc.at:1397: cat stderr +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89556,33 +89273,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1395: $PREPARSER ./calc input -./calc.at:1397: cat stderr -stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: | error ./calc.at:1397: $PREPARSER ./calc input +./calc.at:1395: cat stderr stderr: -stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-error: 4444 != 1 1.1: syntax error ./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: + | + | +1 +./calc.at:1395: $PREPARSER ./calc input 1.1: syntax error -./calc.at:1395: "$PERL" -pi -e 'use strict; +stderr: +syntax error, unexpected '+' +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89592,7 +89300,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1397: "$PERL" -pi -e 'use strict; +syntax error, unexpected '+' +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89602,40 +89311,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1395: $PREPARSER ./calc input -stderr: -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1397: cat stderr -stderr: -syntax error, unexpected number -error: 2222 != 1 +./calc.at:1395: cat stderr input: | 1 = 2 = 3 ./calc.at:1397: $PREPARSER ./calc input +./calc.at:1395: $PREPARSER ./calc /dev/null +stderr: stderr: 1.7: syntax error ./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1395: cat stderr +syntax error, unexpected end of input +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected end of input 1.7: syntax error -input: - | (- *) + (1 2) = 1 -./calc.at:1395: $PREPARSER ./calc input ./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89646,16 +89337,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1397: cat stderr +./calc.at:1403: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS ./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89666,28 +89348,62 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: +./calc.at:1397: cat stderr ./calc.at:1395: cat stderr +input: | | +1 ./calc.at:1397: $PREPARSER ./calc input -stderr: -2.1: syntax error input: -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -./calc.at:1395: $PREPARSER ./calc input stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: +stdout: +./calc.at:1398: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + +./calc.at:1395: $PREPARSER ./calc input +stderr: +2.1: syntax error +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' syntax error, unexpected '*', expecting number or '-' or '(' or '!' syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 ./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error stderr: +stderr: +2.1: syntax error +input: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' syntax error, unexpected '*', expecting number or '-' or '(' or '!' syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1398: $PREPARSER ./calc input +stderr: ./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89698,6 +89414,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89708,19 +89426,43 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: ./calc.at:1397: cat stderr + | 1 2 +./calc.at:1398: $PREPARSER ./calc input ./calc.at:1395: cat stderr +stderr: ./calc.at:1397: $PREPARSER ./calc /dev/null +1.3: syntax error, unexpected number stderr: +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.1: syntax error ./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: input: -1.1: syntax error - | 1 + 2 * 3 + !+ ++ +stdout: + | (!!) + (1 2) = 1 +stderr: +stderr: ./calc.at:1395: $PREPARSER ./calc input +./calc.at:1400: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +1.3: syntax error, unexpected number +1.1: syntax error stderr: +syntax error, unexpected number +error: 2222 != 1 ./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89731,17 +89473,61 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error, unexpected number +error: 2222 != 1 input: - | 1 + 2 * 3 + !- ++ -./calc.at:1395: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1400: $PREPARSER ./calc input +./calc.at:1398: cat stderr +stderr: +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1397: cat stderr +input: stderr: -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1//2 +./calc.at:1398: $PREPARSER ./calc input +input: +stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1397: $PREPARSER ./calc input -stderr: +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 2 +./calc.at:1395: cat stderr +./calc.at:1400: $PREPARSER ./calc input stderr: 1.2: syntax error 1.18: syntax error @@ -89750,12 +89536,27 @@ 1.1-46: error: 4444 != 1 ./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +stderr: +input: +stderr: 1.2: syntax error 1.18: syntax error 1.23: syntax error 1.41: syntax error 1.1-46: error: 4444 != 1 -./calc.at:1395: "$PERL" -pi -e 'use strict; + | (- *) + (1 2) = 1 +./calc.at:1395: $PREPARSER ./calc input +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89775,26 +89576,55 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: cat stderr -./calc.at:1397: cat stderr +./calc.at:1400: cat stderr +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1398: cat stderr input: - | 1 + 2 * 3 + !* ++ -./calc.at:1395: $PREPARSER ./calc input + | 1//2 +./calc.at:1397: cat stderr +./calc.at:1400: $PREPARSER ./calc input stderr: -memory exhausted input: -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1398: $PREPARSER ./calc input +input: +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 | (!!) + (1 2) = 1 ./calc.at:1397: $PREPARSER ./calc input stderr: stderr: -memory exhausted +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +1.1: syntax error, unexpected invalid token +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: 1.11: syntax error 1.1-16: error: 2222 != 1 ./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: cat stderr stderr: +stderr: +1.1: syntax error, unexpected invalid token 1.11: syntax error 1.1-16: error: 2222 != 1 +./calc.at:1400: cat stderr +input: + | (* *) + (*) + (*) +./calc.at:1395: $PREPARSER ./calc input +input: ./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89805,7 +89635,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: "$PERL" -pi -e 'use strict; +stderr: + | error +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89815,31 +89647,61 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: cat stderr +./calc.at:1400: $PREPARSER ./calc input +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1398: cat stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) ./calc.at:1397: cat stderr input: - | (#) + (#) = 2222 -./calc.at:1395: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1398: $PREPARSER ./calc input input: -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: cat stderr | (- *) + (1 2) = 1 ./calc.at:1397: $PREPARSER ./calc input stderr: +1.7: syntax error, unexpected '=' +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' +1.7: syntax error, unexpected '=' + | 1 = 2 = 3 1.4: syntax error 1.12: syntax error 1.1-17: error: 2222 != 1 ./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1400: $PREPARSER ./calc input +./calc.at:1395: cat stderr +stderr: 1.4: syntax error 1.12: syntax error 1.1-17: error: 2222 != 1 -./calc.at:1397: "$PERL" -pi -e 'use strict; +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89849,7 +89711,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: "$PERL" -pi -e 'use strict; +input: +stderr: +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89859,29 +89723,64 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: cat stderr + | 1 + 2 * 3 + !+ ++ +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1395: $PREPARSER ./calc input +stderr: +./calc.at:1398: cat stderr +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: cat stderr +stderr: +input: ./calc.at:1397: cat stderr + | + | +1 +./calc.at:1398: $PREPARSER ./calc input input: +stderr: input: - | (1 + #) = 1111 + | 1 + 2 * 3 + !- ++ +2.1: syntax error, unexpected '+' +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1395: $PREPARSER ./calc input + | + | +1 +./calc.at:1400: $PREPARSER ./calc input +input: +stderr: stderr: -syntax error: invalid character: '#' | (* *) + (*) + (*) -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1397: $PREPARSER ./calc input stderr: +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error, unexpected '+' stderr: -syntax error: invalid character: '#' +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.2: syntax error 1.10: syntax error 1.16: syntax error ./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: 1.2: syntax error 1.10: syntax error 1.16: syntax error -./calc.at:1395: "$PERL" -pi -e 'use strict; +./calc.at:1400: cat stderr +./calc.at:1398: cat stderr +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89891,7 +89790,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1397: "$PERL" -pi -e 'use strict; +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89901,27 +89800,78 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1395: $PREPARSER ./calc input +./calc.at:1400: $PREPARSER ./calc /dev/null +./calc.at:1398: $PREPARSER ./calc /dev/null stderr: +stderr: +1.1: syntax error, unexpected end of input +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: cat stderr ./calc.at:1397: cat stderr -syntax error: invalid character: '#' -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: -syntax error: invalid character: '#' +stderr: +1.1: syntax error, unexpected end of input +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +input: +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1395: $PREPARSER ./calc input | 1 + 2 * 3 + !+ ++ ./calc.at:1397: $PREPARSER ./calc input +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1400: cat stderr stderr: +stderr: +memory exhausted ./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1398: cat stderr +input: +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +memory exhausted +./calc.at:1400: $PREPARSER ./calc input +input: stderr: input: | 1 + 2 * 3 + !- ++ ./calc.at:1397: $PREPARSER ./calc input +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1398: $PREPARSER ./calc input +stderr: stderr: ./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.1-46: error: 4444 != 1 +stderr: +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +stderr: stderr: ./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -89933,7 +89883,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: cat stderr +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1400: cat stderr ./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89944,23 +89899,62 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1395: cat stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: - | (1 + # + 1) = 1111 -./calc.at:1395: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +./calc.at:1400: $PREPARSER ./calc input +input: +./calc.at:1398: cat stderr stderr: -syntax error: invalid character: '#' -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1397: cat stderr + | (#) + (#) = 2222 +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1395: $PREPARSER ./calc input +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: stderr: syntax error: invalid character: '#' +syntax error: invalid character: '#' input: +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) 
+ (1 2) = 1 +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1398: $PREPARSER ./calc input | 1 + 2 * 3 + !* ++ +stderr: ./calc.at:1397: $PREPARSER ./calc input stderr: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: 1.14: memory exhausted +syntax error: invalid character: '#' +syntax error: invalid character: '#' ./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: cat stderr stderr: -./calc.at:1395: "$PERL" -pi -e 'use strict; +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +stderr: +1.14: memory exhausted +input: + | (- *) + (1 2) = 1 +./calc.at:1400: $PREPARSER ./calc input +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89970,8 +89964,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.14: memory exhausted -./calc.at:1395: cat stderr ./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89982,20 +89974,66 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (1 + 1) / (1 - 1) -./calc.at:1395: $PREPARSER ./calc input +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -error: null divisor +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: cat stderr +stderr: +./calc.at:1395: cat stderr +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +input: ./calc.at:1397: cat stderr -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +input: +./calc.at:1398: $PREPARSER ./calc input +./calc.at:1400: cat stderr + | (1 + #) = 1111 stderr: -error: null divisor +./calc.at:1395: $PREPARSER ./calc input input: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (#) + (#) = 2222 ./calc.at:1397: $PREPARSER ./calc input stderr: -./calc.at:1395: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +input: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 + | (* *) + (*) + (*) +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +stderr: +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90005,42 +90043,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -stderr: -./calc.at:1398: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: cat stderr stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -507. calc.at:1395: input: - ok - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1398: $PREPARSER ./calc input +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1398: cat stderr ./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90051,27 +90060,50 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - -./calc.at:1397: cat stderr +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1400: cat stderr input: - | 1 2 + | (* *) + (*) + (*) ./calc.at:1398: $PREPARSER ./calc input +./calc.at:1397: cat stderr +./calc.at:1395: cat stderr input: stderr: -1.3: syntax error, unexpected number +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1400: $PREPARSER ./calc input +stderr: +input: +stderr: +input: +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
| (1 + #) = 1111 ./calc.at:1397: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1395: $PREPARSER ./calc input stderr: stderr: -1.6: syntax error: invalid character: '#' -1.3: syntax error, unexpected number -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.6: syntax error: invalid character: '#' +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: ./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90082,7 +90114,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 + 2 * 3 + !- ++ +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1400: $PREPARSER ./calc input +stderr: +stderr: +syntax error: invalid character: '#' ./calc.at:1398: cat stderr +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90094,21 +90135,66 @@ }eg ' expout || exit 77 input: - | 1//2 + | 1 + 2 * 3 + !+ ++ ./calc.at:1398: $PREPARSER ./calc input -./calc.at:1397: cat stderr +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1400: cat stderr stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: cat stderr input: stderr: +./calc.at:1395: cat stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1400: $PREPARSER ./calc input +input: | (# + 1) = 1111 ./calc.at:1397: $PREPARSER ./calc input stderr: +input: +1.14: memory exhausted +input: +stderr: +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.2: syntax error: invalid character: '#' ./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' + | (1 + # + 1) = 1111 + | 1 + 2 * 3 + !- ++ +./calc.at:1398: $PREPARSER ./calc input stderr: +./calc.at:1395: $PREPARSER ./calc input +stderr: +1.14: memory exhausted +stderr: +stderr: +syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1400: cat stderr +syntax error: invalid character: '#' +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: ./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90119,9 +90205,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error: invalid character: '#' + | (#) + (#) = 2222 +./calc.at:1400: $PREPARSER ./calc input +stderr: ./calc.at:1398: cat stderr -./calc.at:1397: "$PERL" -pi -e 'use strict; +./calc.at:1397: cat stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90131,38 +90222,52 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -515. calc.at:1407: testing Calculator %glr-parser %debug ... -./calc.at:1407: mv calc.y.tmp calc.y - +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' input: - | error +./calc.at:1395: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1397: $PREPARSER ./calc input +input: + | 1 + 2 * 3 + !* ++ ./calc.at:1398: $PREPARSER ./calc input -./calc.at:1407: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1397: cat stderr stderr: -1.1: syntax error, unexpected invalid token +1.6: syntax error: invalid character: '#' +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.14: memory exhausted ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: cat stderr +input: +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1395: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +stderr: stderr: input: -stdout: +error: null divisor +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +1.14: memory exhausted +./calc.at:1400: $PREPARSER ./calc input stderr: - | (1 + # + 1) = 1111 -./calc.at:1397: $PREPARSER ./calc input -1.1: syntax error, unexpected invalid token +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1400: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - 1.6: syntax error: invalid character: '#' -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: null divisor stderr: ./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -90175,24 +90280,17 @@ }eg ' expout || exit 77 1.6: syntax error: invalid character: '#' -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1400: $PREPARSER ./calc input ./calc.at:1398: cat stderr +./calc.at:1397: cat stderr +./calc.at:1400: cat stderr +input: + | (#) + (#) = 2222 +./calc.at:1398: $PREPARSER ./calc input +input: stderr: -./calc.at:1397: "$PERL" -pi -e 'use strict; +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90202,29 +90300,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1 = 2 = 3 -./calc.at:1398: $PREPARSER ./calc input -input: - | 1 2 +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 ./calc.at:1400: $PREPARSER ./calc input +input: +./calc.at:1395: cat stderr stderr: -./calc.at:1397: cat stderr -1.7: syntax error, unexpected '=' -stderr: -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) + | (1 + 1) / (1 - 1) +1.2: syntax error: invalid character: '#' ./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.7: syntax error, unexpected '=' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -input: - | (1 + 1) / (1 - 1) +1.2: syntax error: invalid character: '#' ./calc.at:1397: $PREPARSER ./calc input stderr: +1.11-17: error: null divisor +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +507. 
calc.at:1395: ok +stderr: +1.11-17: error: null divisor ./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90235,23 +90331,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.11-17: error: null divisor -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1400: cat stderr -stderr: -1.11-17: error: null divisor ./calc.at:1398: cat stderr input: - | 1//2 + | (1 + # + 1) = 1111 + ./calc.at:1400: $PREPARSER ./calc input -stderr: input: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) - | - | +1 +stderr: + | (1 + #) = 1111 ./calc.at:1398: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' ./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: "$PERL" -pi -e 'use strict; +stderr: +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1400: cat stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90261,16 +90361,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -2.1: syntax error, unexpected '+' -stderr: -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -stderr: -2.1: syntax error, unexpected '+' -./calc.at:1397: cat stderr -./calc.at:1400: cat stderr -./calc.at:1398: "$PERL" -pi -e 'use strict; +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90280,26 +90371,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -508. calc.at:1397: ok input: - | error + | (1 + 1) / (1 - 1) ./calc.at:1400: $PREPARSER ./calc input ./calc.at:1398: cat stderr stderr: -./calc.at:1407: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +1.11-17: error: null divisor ./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: $PREPARSER ./calc /dev/null +input: +./calc.at:1397: cat stderr stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) + | (# + 1) = 1111 +./calc.at:1398: $PREPARSER ./calc input stderr: -1.1: syntax error, unexpected end of input +1.11-17: error: null divisor +508. calc.at:1397: ok +1.2: syntax error: invalid character: '#' ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - ./calc.at:1400: cat stderr -1.1: syntax error, unexpected end of input -input: +1.2: syntax error: invalid character: '#' ./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90310,40 +90401,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 = 2 = 3 -./calc.at:1400: $PREPARSER ./calc input -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +510. 
calc.at:1400: ok + ./calc.at:1398: cat stderr -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) input: -./calc.at:1400: cat stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (1 + # + 1) = 1111 ./calc.at:1398: $PREPARSER ./calc input -input: stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 - | - | +1 +1.6: syntax error: invalid character: '#' ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1400: $PREPARSER ./calc input -stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -stderr: -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +1.6: syntax error: invalid character: '#' +514. calc.at:1405: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose ... ./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90354,31 +90423,56 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1400: cat stderr +./calc.at:1405: mv calc.y.tmp calc.y + ./calc.at:1398: cat stderr -./calc.at:1400: $PREPARSER ./calc /dev/null -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + input: - | (!!) + (1 2) = 1 +./calc.at:1405: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + | (1 + 1) / (1 - 1) ./calc.at:1398: $PREPARSER ./calc input stderr: -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 +1.11-17: error: null divisor ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -516. calc.at:1408: testing Calculator %glr-parser parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose ... +515. calc.at:1407: testing Calculator %glr-parser %debug ... stderr: +./calc.at:1407: mv calc.y.tmp calc.y + +1.11-17: error: null divisor +./calc.at:1407: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +516. calc.at:1408: testing Calculator %glr-parser parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose ... 
./calc.at:1408: mv calc.y.tmp calc.y +./calc.at:1398: cat stderr +509. calc.at:1398: ok +./calc.at:1408: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + +./calc.at:1405: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1407: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +517. calc.at:1409: testing Calculator %glr-parser parse.error=verbose %debug %locations %header api.prefix={calc} api.token.prefix={TOK_} %verbose ... +./calc.at:1409: mv calc.y.tmp calc.y + +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1408: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1408: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1400: cat stderr ./calc.at:1401: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -90391,19 +90485,6 @@ )' calc.c calc.h input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1400: $PREPARSER ./calc input -input: -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -90417,37 +90498,112 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 +./calc.at:1401: $PREPARSER ./calc input +stderr: +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: + | 1 2 +./calc.at:1401: $PREPARSER ./calc input +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1401: cat stderr +input: + | 1//2 +./calc.at:1401: $PREPARSER ./calc input +stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +stdout: +stderr: +./types.at:139: $PREPARSER ./test +stdout: +./types.at:139: ./check +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1409: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1401: cat stderr +input: +======== Testing with C++ standard flags: '' + | error +./calc.at:1401: $PREPARSER ./calc input +stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1401: cat stderr +input: + | 1 = 2 = 3 +./calc.at:1401: $PREPARSER ./calc input +stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1401: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +input: + | + | +1 +./calc.at:1401: $PREPARSER ./calc input +stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1401: cat stderr +./calc.at:1401: $PREPARSER ./calc /dev/null +stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1401: cat stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1401: $PREPARSER ./calc input +stderr: 1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) 1.18: syntax error on 
token [')'] (expected: [number] ['-'] ['('] ['!']) 1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) 1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) 1.1-46: error: 4444 != 1 -stderr: ./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: cat stderr -stderr: stderr: 1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) 1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) 1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) 1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) 1.1-46: error: 4444 != 1 +./calc.at:1401: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS input: -input: - | 1 2 - | (- *) + (1 2) = 1 + | (!!) + (1 2) = 1 ./calc.at:1401: $PREPARSER ./calc input -./calc.at:1398: $PREPARSER ./calc input -./calc.at:1400: cat stderr stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1401: cat stderr +input: stderr: stdout: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1402: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -90459,25 +90615,10 @@ || /\t/ )' calc.c calc.h -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: + | (- *) + (1 2) = 1 +./calc.at:1401: $PREPARSER ./calc input input: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) - | (!!) + (1 2) = 1 -./calc.at:1400: $PREPARSER ./calc input -stderr: stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1401: cat stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -90493,78 +90634,25 @@ | (2^2)^3 = 64 ./calc.at:1402: $PREPARSER ./calc input stderr: -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1//2 -./calc.at:1401: $PREPARSER ./calc input stderr: -./calc.at:1398: cat stderr +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 stderr: input: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1400: cat stderr -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1401: cat stderr | 1 2 ./calc.at:1402: $PREPARSER ./calc input - | (* *) + (*) + (*) -./calc.at:1398: $PREPARSER ./calc input -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) stderr: 1.3: syntax error, unexpected number ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | (- *) + (1 2) = 1 -./calc.at:1400: $PREPARSER ./calc input -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: cat stderr -stderr: stderr: 1.3: syntax error, unexpected number -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -input: - | error -./calc.at:1401: $PREPARSER ./calc input -stderr: -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90575,72 +90663,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1408: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1398: cat stderr -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1400: cat stderr -./calc.at:1402: cat stderr -input: input: | (* *) + (*) + (*) -./calc.at:1400: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1398: $PREPARSER ./calc input -./calc.at:1401: cat stderr -input: - | 1 = 2 = 3 ./calc.at:1401: $PREPARSER ./calc input stderr: 1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) 1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) 1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -stderr: -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) ./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) 1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) 1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1402: cat stderr +./calc.at:1401: cat stderr +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1401: $PREPARSER ./calc input stderr: -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: | 1//2 ./calc.at:1402: $PREPARSER ./calc input stderr: -./calc.at:1400: cat stderr -input: -stderr: -./calc.at:1401: cat stderr 1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' - | 1 + 2 * 3 + !- ++ -./calc.at:1398: $PREPARSER ./calc input ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: - | 1 + 2 * 3 + !+ ++ input: -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1400: $PREPARSER ./calc input -stderr: - | - | +1 + | 1 + 2 * 3 + !- ++ ./calc.at:1401: $PREPARSER ./calc input -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -stderr: stderr: -stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) ./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90651,78 +90706,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1400: $PREPARSER ./calc input stderr: ./calc.at:1402: cat stderr -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1401: cat stderr -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1401: $PREPARSER ./calc /dev/null input: -stderr: -./calc.at:1398: cat stderr -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | error ./calc.at:1402: $PREPARSER ./calc input -./calc.at:1400: cat stderr -stderr: stderr: -1.1: syntax error, unexpected invalid token -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) - | 1 + 2 * 3 + !* ++ -./calc.at:1398: $PREPARSER ./calc input -stderr: -stderr: 1.1: syntax error, unexpected invalid token -1.14: memory exhausted -input: -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: cat stderr | 1 + 2 * 3 + !* ++ -./calc.at:1400: $PREPARSER ./calc input -stderr: -1.14: memory exhausted -stderr: -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -1.14: memory exhausted ./calc.at:1401: $PREPARSER ./calc input -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 +1.14: memory exhausted ./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error, unexpected invalid token +stderr: 1.14: memory exhausted -./calc.at:1398: "$PERL" -pi -e 'use strict; +./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90732,64 +90735,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1402: cat stderr -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -input: -./calc.at:1398: cat stderr - | 1 = 2 = 3 -./calc.at:1402: $PREPARSER ./calc input -./calc.at:1400: cat stderr -stderr: ./calc.at:1401: cat stderr -1.7: syntax error, unexpected '=' -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: cat stderr input: input: | (#) + (#) = 2222 - | (#) + (#) = 2222 -./calc.at:1398: $PREPARSER ./calc input -./calc.at:1400: $PREPARSER ./calc input -stderr: -input: -1.7: syntax error, unexpected '=' stderr: - | (!!) + (1 2) = 1 ./calc.at:1401: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' stdout: + | 1 = 2 = 3 +stderr: ./calc.at:1403: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -90801,27 +90756,19 @@ || /\t/ )' calc.c calc.h +./calc.at:1402: $PREPARSER ./calc input 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' -./calc.at:1402: cat stderr -./calc.at:1401: cat stderr -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1400: cat stderr -input: - | - | +1 +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.7: syntax error, unexpected '=' +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: +1.7: syntax error, unexpected '=' input: -./calc.at:1402: $PREPARSER ./calc input | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -90835,41 +90782,10 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1398: cat stderr -stderr: - | (- *) + (1 2) = 1 -./calc.at:1401: $PREPARSER ./calc input -2.1: syntax error, unexpected '+' ./calc.at:1403: $PREPARSER ./calc input -input: -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (1 + #) = 1111 +./calc.at:1401: cat stderr stderr: -./calc.at:1400: $PREPARSER ./calc input -input: ./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -2.1: syntax error, unexpected '+' - | (1 + #) = 1111 -1.6: syntax error: invalid character: '#' -./calc.at:1398: $PREPARSER ./calc input -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -stderr: -1.6: syntax error: invalid character: '#' -1.6: syntax error: invalid character: '#' -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 ./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90880,21 +90796,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: stderr: + | (1 + #) = 1111 +./calc.at:1401: $PREPARSER ./calc input +./calc.at:1402: cat stderr input: -1.6: syntax error: invalid character: '#' -./calc.at:1401: cat stderr | 1 2 ./calc.at:1403: $PREPARSER ./calc input -./calc.at:1400: cat stderr -./calc.at:1402: cat stderr input: -./calc.at:1402: $PREPARSER ./calc /dev/null stderr: - | (* *) + (*) + (*) -input: -./calc.at:1401: $PREPARSER ./calc input -./calc.at:1398: "$PERL" -pi 
-e 'use strict; +1.6: syntax error: invalid character: '#' +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +stderr: +./calc.at:1402: $PREPARSER ./calc input +1.3: syntax error, unexpected number +stderr: +2.1: syntax error, unexpected '+' +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.3: syntax error, unexpected number +2.1: syntax error, unexpected '+' +./calc.at:1401: cat stderr +./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90904,36 +90834,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (# + 1) = 1111 -stderr: -./calc.at:1400: $PREPARSER ./calc input -1.3: syntax error, unexpected number -stderr: -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error, unexpected end of file -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -stderr: -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: cat stderr -stderr: -stderr: -stderr: -stderr: -1.1: syntax error, unexpected end of file -1.2: syntax error: invalid character: '#' -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.3: syntax error, unexpected number -input: - | (# + 1) = 1111 -./calc.at:1398: $PREPARSER ./calc input -stderr: ./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90944,14 +90844,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1402: cat stderr +./calc.at:1403: cat stderr +input: + | (# + 1) = 1111 +./calc.at:1401: $PREPARSER ./calc input +./calc.at:1402: $PREPARSER ./calc /dev/null +input: + | 1//2 +./calc.at:1403: $PREPARSER ./calc input +stderr: +stderr: +1.1: syntax error, unexpected end of file 1.2: syntax error: invalid character: '#' -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: cat stderr -./calc.at:1400: cat stderr stderr: -./calc.at:1402: cat stderr +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: 1.2: syntax error: invalid character: '#' -./calc.at:1403: "$PERL" -pi -e 'use strict; +1.1: syntax error, unexpected end of file +./calc.at:1401: cat stderr +./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90962,12 +90878,15 @@ }eg ' expout || exit 77 input: -input: -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1401: $PREPARSER ./calc input +stderr: +./calc.at:1402: cat stderr +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' | (1 + # + 1) = 1111 -./calc.at:1398: "$PERL" -pi -e 'use strict; +./calc.at:1401: $PREPARSER ./calc input +stderr: +input: +1.6: syntax error: invalid character: '#' +./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90977,51 +90896,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1403: cat stderr +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1400: $PREPARSER ./calc input ./calc.at:1402: $PREPARSER ./calc input stderr: stderr: -stderr: -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.1-46: error: 4444 != 1 -1.6: syntax error: invalid character: '#' +./calc.at:1403: cat stderr ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1//2 stderr: -./calc.at:1403: $PREPARSER ./calc input -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.1-46: error: 4444 != 1 -1.6: syntax error: invalid character: '#' -./calc.at:1398: cat stderr -input: -input: - | (1 + # + 1) = 1111 -./calc.at:1398: $PREPARSER ./calc input - | 1 + 2 * 3 + !- ++ -./calc.at:1401: $PREPARSER ./calc input -stderr: -stderr: -1.6: syntax error: invalid character: '#' -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+ | error +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1401: cat stderr stderr: -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error, unexpected invalid token +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91032,26 +90932,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1400: cat stderr stderr: -1.6: syntax error: invalid character: '#' +input: +1.1: syntax error, unexpected invalid token + | (1 + 1) / (1 - 1) ./calc.at:1402: cat stderr -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1401: $PREPARSER ./calc input stderr: +1.11-17: error: null divisor input: - | (1 + 1) / (1 - 1) -./calc.at:1400: $PREPARSER ./calc input -./calc.at:1403: cat stderr -./calc.at:1398: "$PERL" -pi -e 'use strict; +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -91061,42 +90952,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -stderr: | (!!) + (1 2) = 1 ./calc.at:1402: $PREPARSER ./calc input stderr: 1.11-17: error: null divisor -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | error +stderr: 1.11: syntax error, unexpected number 1.1-16: error: 2222 != 1 -./calc.at:1398: cat stderr -./calc.at:1403: $PREPARSER ./calc input ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: cat stderr ./calc.at:1401: cat stderr stderr: -1.11-17: error: null divisor -stderr: -1.1: syntax error, unexpected invalid token -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +input: 1.11: syntax error, unexpected number 1.1-16: error: 2222 != 1 -input: -input: - | (1 + 1) / (1 - 1) -./calc.at:1398: $PREPARSER ./calc input -stderr: -1.1: syntax error, unexpected invalid token - | 1 + 2 * 3 + !* ++ -./calc.at:1401: $PREPARSER ./calc input -./calc.at:1400: cat stderr -stderr: -1.11-17: error: null divisor + | 1 = 2 = 3 +./calc.at:1403: $PREPARSER ./calc input +511. calc.at:1401: ok stderr: -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error, unexpected '=' +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91107,8 +90982,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.14: memory exhausted -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.7: syntax error, unexpected '=' + +./calc.at:1402: cat stderr ./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91119,54 +90996,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -510. 
calc.at:1400: 1.14: memory exhausted -./calc.at:1402: cat stderr - ok -1.11-17: error: null divisor ./calc.at:1403: cat stderr input: | (- *) + (1 2) = 1 ./calc.at:1402: $PREPARSER ./calc input -./calc.at:1401: cat stderr -input: -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: - | 1 = 2 = 3 -./calc.at:1403: $PREPARSER ./calc input 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.12: syntax error, unexpected number 1.1-17: error: 2222 != 1 ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./calc.at:1398: cat stderr -stderr: input: + | + | +1 +./calc.at:1403: $PREPARSER ./calc input +stderr: stderr: -1.7: syntax error, unexpected '=' -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -./calc.at:1401: $PREPARSER ./calc input 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.12: syntax error, unexpected number 1.1-17: error: 2222 != 1 +2.1: syntax error, unexpected '+' +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -509. calc.at:1398: 1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - ok -1.7: syntax error, unexpected '=' -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error, unexpected '+' ./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91177,9 +91028,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' ./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91192,37 +91040,24 @@ ' expout || exit 77 ./calc.at:1402: cat stderr ./calc.at:1403: cat stderr - -./calc.at:1401: cat stderr -input: input: +./calc.at:1403: $PREPARSER ./calc /dev/null +stderr: | (* *) + (*) + (*) ./calc.at:1402: $PREPARSER ./calc input - | - | +1 -input: -./calc.at:1403: $PREPARSER ./calc input +1.1: syntax error, unexpected end of input +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: - | (1 + #) = 1111 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1401: $PREPARSER ./calc input -2.1: syntax error, unexpected '+' ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' -stderr: +1.1: syntax error, unexpected end of input stderr: -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-2.1: syntax error, unexpected '+' -stderr: -1.6: syntax error: invalid character: '#' ./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91243,32 +91078,42 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1401: cat stderr ./calc.at:1403: cat stderr ./calc.at:1402: cat stderr -./calc.at:1403: $PREPARSER ./calc /dev/null +518. calc.at:1411: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose ... +./calc.at:1411: mv calc.y.tmp calc.y + input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1403: $PREPARSER ./calc input stderr: - | (# + 1) = 1111 -1.1: syntax error, unexpected end of input -./calc.at:1401: $PREPARSER ./calc input -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: -517. calc.at:1409: testing Calculator %glr-parser parse.error=verbose %debug %locations %header api.prefix={calc} api.token.prefix={TOK_} %verbose ... +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1411: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 + !+ ++ ./calc.at:1402: $PREPARSER ./calc input stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1409: mv calc.y.tmp calc.y - -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error, unexpected end of input +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stdout: +stderr: +./types.at:139: $PREPARSER ./test +stderr: +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1402: $PREPARSER ./calc input +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: ./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -91280,29 +91125,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1401: cat stderr -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1402: $PREPARSER ./calc input -stderr: -input: -./calc.at:1403: cat stderr ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1401: $PREPARSER ./calc input -518. calc.at:1411: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose ... 
-./calc.at:1411: mv calc.y.tmp calc.y - -stderr: -stderr: -input: -1.6: syntax error: invalid character: '#' -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1403: $PREPARSER ./calc input -./calc.at:1411: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: +======== Testing with C++ standard flags: '' stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91313,74 +91139,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -stderr: +./calc.at:1403: cat stderr ./calc.at:1402: cat stderr -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1401: cat stderr input: -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +input: | 1 + 2 * 3 + !* ++ ./calc.at:1402: $PREPARSER ./calc input -input: - | (1 + 1) / (1 - 1) -./calc.at:1401: $PREPARSER ./calc input -stderr: -1.14: memory exhausted -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1403: cat stderr -stderr: -1.11-17: error: null divisor -stderr: -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.14: memory exhausted -stderr: -input: -1.11-17: error: null divisor | (!!) + (1 2) = 1 ./calc.at:1403: $PREPARSER ./calc input -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: +stderr: +1.14: memory exhausted +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.11: syntax error, unexpected number 1.1-16: error: 2222 != 1 ./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: cat stderr stderr: -./calc.at:1402: cat stderr +1.14: memory exhausted +stderr: 1.11: syntax error, unexpected number 1.1-16: error: 2222 != 1 -input: - | (#) + (#) = 2222 -./calc.at:1402: $PREPARSER ./calc input -511. 
calc.at:1401: ./calc.at:1403: "$PERL" -pi -e 'use strict; +./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -91390,18 +91169,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - ok -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1403: cat stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: - | (- *) + (1 2) = 1 -./calc.at:1403: $PREPARSER ./calc input ./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91412,28 +91179,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1402: cat stderr +./calc.at:1403: cat stderr +input: + | (#) + (#) = 2222 +./calc.at:1402: $PREPARSER ./calc input +input: + | (- *) + (1 2) = 1 +./calc.at:1403: $PREPARSER ./calc input +stderr: stderr: 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.12: syntax error, unexpected number 1.1-17: error: 2222 != 1 ./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1402: cat stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.12: syntax error, unexpected number 1.1-17: error: 2222 != 1 -input: - | (1 + #) = 1111 -./calc.at:1402: $PREPARSER ./calc input -./calc.at:1409: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -stderr: -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.6: syntax error: invalid character: '#' -stdout: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1411: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS ./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91444,18 +91214,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1405: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - -./calc.at:1403: cat stderr ./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91466,49 +91224,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 +./calc.at:1403: cat stderr ./calc.at:1402: cat stderr input: -./calc.at:1405: $PREPARSER ./calc input | (* *) + (*) + (*) +./calc.at:1403: $PREPARSER ./calc input stderr: input: - | (# + 1) = 1111 + | (1 + #) = 1111 ./calc.at:1402: $PREPARSER ./calc input -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1403: $PREPARSER ./calc input -stderr: -./calc.at:1411: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +stdout: stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./types.at:139: ./check ./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stderr: -1.2: syntax error: invalid character: '#' 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -input: - | 1 2 -./calc.at:1405: $PREPARSER ./calc input +stderr: +1.6: syntax error: invalid character: '#' ./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91519,7 +91260,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: ./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91530,43 +91270,50 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1402: cat stderr -stderr: +input: ./calc.at:1403: cat stderr -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) + | (# + 1) = 1111 +./calc.at:1402: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: +./calc.at:1402: cat stderr | 1 + 2 * 3 + !+ ++ -./calc.at:1405: cat stderr ./calc.at:1403: $PREPARSER ./calc input stderr: input: +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (1 + # + 1) = 1111 ./calc.at:1402: $PREPARSER ./calc input -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -519. calc.at:1413: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -1.6: syntax error: invalid character: '#' stderr: -./calc.at:1413: mv calc.y.tmp calc.y - +1.6: syntax error: invalid character: '#' ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1//2 -input: stderr: -./calc.at:1405: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' stderr: | 1 + 2 * 3 + !- ++ +stdout: +1.6: syntax error: invalid character: '#' ./calc.at:1403: $PREPARSER ./calc input -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1413: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test stderr: ./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91577,11 +91324,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: ./calc.at:1402: cat stderr -./calc.at:1405: cat stderr -input: +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stdout: +./types.at:139: $PREPARSER ./test ./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91592,30 +91342,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS input: +stderr: +./calc.at:1403: cat stderr | (1 + 1) / (1 - 1) ./calc.at:1402: $PREPARSER ./calc input - | error -./calc.at:1405: $PREPARSER ./calc input -./calc.at:1403: cat stderr -stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.11-17: error: null divisor ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 + 2 * 3 + !* ++ -./calc.at:1403: $PREPARSER ./calc input -stderr: stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) 1.11-17: error: null divisor + | 1 + 2 * 3 + !* ++ +======== Testing with C++ standard flags: '' +./calc.at:1403: $PREPARSER ./calc input stderr: 1.14: memory exhausted -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1405: cat stderr ./calc.at:1402: 
"$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91626,7 +91370,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: 1.14: memory exhausted +./calc.at:1402: cat stderr +stderr: ./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91637,28 +91386,59 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1402: cat stderr -./calc.at:1403: cat stderr +stdout: +./calc.at:1405: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +512. calc.at:1402: ./calc.at:1403: cat stderr + ok input: -512. calc.at:1402: ok - | 1 = 2 = 3 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 ./calc.at:1405: $PREPARSER ./calc input +stderr: +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: + +stderr: | (#) + (#) = 2222 +input: ./calc.at:1403: $PREPARSER ./calc input + | 1 2 +./calc.at:1405: $PREPARSER ./calc input stderr: stderr: 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) ./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) stderr: +./calc.at:1405: cat stderr 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +input: ./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91669,67 +91449,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1405: cat stderr -input: -./calc.at:1403: cat stderr - | - | +1 + | 1//2 ./calc.at:1405: $PREPARSER ./calc input stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1403: cat stderr +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +stderr: ./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: ./check +stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) input: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y | (1 + #) = 1111 ./calc.at:1403: $PREPARSER ./calc input stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -stderr: 1.6: syntax error: invalid character: '#' ./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./calc.at:1405: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1405: $PREPARSER ./calc /dev/null -stderr: -1.1: syntax error on 
token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1413: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1403: cat stderr -./calc.at:1405: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1403: $PREPARSER ./calc input +519. calc.at:1413: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -520. calc.at:1414: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +1.6: syntax error: invalid character: '#' input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1414: mv calc.y.tmp calc.y +stdout: +./calc.at:1413: mv calc.y.tmp calc.y -stderr: + | error ./calc.at:1405: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./types.at:139: ./check +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) ./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91740,68 +91491,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1413: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 ./calc.at:1403: cat stderr -./calc.at:1405: cat stderr +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) input: - | (1 + # + 1) = 1111 + | (# + 1) = 1111 ./calc.at:1403: $PREPARSER ./calc input -input: stderr: - | (!!) 
+ (1 2) = 1 -./calc.at:1405: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' ./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -stderr: -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +1.2: syntax error: invalid character: '#' ./calc.at:1405: cat stderr -./calc.at:1403: cat stderr input: - | (- *) + (1 2) = 1 + | 1 = 2 = 3 ./calc.at:1405: $PREPARSER ./calc input -input: -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 - | (1 + 1) / (1 - 1) -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1403: $PREPARSER ./calc input -stderr: stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -1.11-17: error: null divisor -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1405: cat stderr -1.11-17: error: null divisor -input: - | (* *) + (*) + (*) -./calc.at:1405: $PREPARSER ./calc input +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) ./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91812,103 +91522,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1403: cat stderr -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -513. 
calc.at:1403: ok -./calc.at:1405: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1405: $PREPARSER ./calc input - -stderr: -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1414: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1405: $PREPARSER ./calc input -stderr: -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1405: cat stderr -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1405: $PREPARSER ./calc input -stderr: -1.14: memory exhausted -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.14: memory exhausted -./calc.at:1405: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1405: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' ./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -521. calc.at:1416: testing Calculator %glr-parser %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1416: mv calc.y.tmp calc.y - -./calc.at:1405: cat stderr -./calc.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -input: - | (1 + #) = 1111 -./calc.at:1405: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1405: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1405: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1405: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1405: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1405: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1405: $PREPARSER ./calc input -stderr: -1.11-17: error: null divisor -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11-17: error: null divisor -./calc.at:1405: cat stderr -514. calc.at:1405: ok - -./calc.at:1416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -522. calc.at:1426: testing Calculator lalr1.cc %header ... 
-./calc.at:1426: mv calc.y.tmp calc.y - -./calc.at:1426: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1426: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./calc.at:1407: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -91920,7 +91536,14 @@ || /\t/ )' calc.c +stderr: +./calc.at:1403: cat stderr +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +input: input: + | (1 + # + 1) = 1111 +./calc.at:1405: cat stderr +./calc.at:1403: $PREPARSER ./calc input | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -91936,6 +91559,18 @@ | (2^2)^3 = 64 ./calc.at:1407: $PREPARSER ./calc input stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | + | +1 +stderr: +./calc.at:1405: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token @@ -92774,6 +92409,18 @@ Cleanup: popping nterm input (1.1: ) ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: Starting parse Entering state 0 Reading a token @@ -93613,6 +93260,9 @@ input: | 1 2 ./calc.at:1407: $PREPARSER ./calc input +./calc.at:1403: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1405: cat stderr stderr: Starting parse Entering state 0 @@ -93630,6 +93280,8 @@ Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token "number" (1.1: 2) ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: $PREPARSER ./calc /dev/null +input: stderr: Starting parse Entering state 0 @@ -93646,6 +93298,17 @@ syntax error Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token "number" (1.1: 2) +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1403: $PREPARSER ./calc input +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11-17: error: null divisor +stderr: +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: ./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -93656,11 +93319,34 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.11-17: error: null divisor +./calc.at:1413: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ 
/\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1405: cat stderr ./calc.at:1407: cat stderr input: +./calc.at:1403: cat stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1405: $PREPARSER ./calc input | 1//2 ./calc.at:1407: $PREPARSER ./calc input stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +stderr: Starting parse Entering state 0 Reading a token @@ -93682,6 +93368,8 @@ Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.1: ) ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 @@ -93703,6 +93391,13 @@ Error: popping token '/' (1.1: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.1: ) +513. calc.at:1403: 1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 + ok +./calc.at:1405: cat stderr ./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -93713,7 +93408,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: + + | (!!) 
+ (1 2) = 1 +./calc.at:1405: $PREPARSER ./calc input +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1407: cat stderr +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 input: | error ./calc.at:1407: $PREPARSER ./calc input @@ -93725,13 +93431,26 @@ syntax error Cleanup: discarding lookahead token "invalid token" (1.1: ) ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: cat stderr stderr: +input: + | (- *) + (1 2) = 1 +./calc.at:1405: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token Next token is token "invalid token" (1.1: ) syntax error Cleanup: discarding lookahead token "invalid token" (1.1: ) +stderr: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 ./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -93742,11 +93461,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1405: cat stderr +input: ./calc.at:1407: cat stderr + | (* *) + (*) + (*) +stderr: +./calc.at:1405: $PREPARSER ./calc input +stdout: +stderr: +./types.at:139: $PREPARSER ./test +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: | 1 = 2 = 3 ./calc.at:1407: $PREPARSER ./calc input stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -93776,8 +93508,15 @@ Error: popping token '=' (1.1: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.1: ) +stderr: ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) stderr: +======== Testing with C++ standard flags: '' +520. calc.at:1414: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
Starting parse Entering state 0 Reading a token @@ -93807,6 +93546,14 @@ Error: popping token '=' (1.1: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.1: ) +./calc.at:1405: cat stderr +./calc.at:1414: mv calc.y.tmp calc.y + +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1405: $PREPARSER ./calc input +stderr: ./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -93817,7 +93564,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1405: $PREPARSER ./calc input +stderr: +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1407: cat stderr +stderr: input: | | +1 @@ -93843,6 +93599,10 @@ Error: popping nterm input (1.1: ) Cleanup: discarding lookahead token '+' (1.1: ) ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: cat stderr +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1405: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -93863,6 +93623,11 @@ syntax error Error: popping nterm input (1.1: ) Cleanup: discarding lookahead token '+' (1.1: ) +stderr: +1.14: memory exhausted +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.14: memory exhausted ./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -93873,8 +93638,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1405: cat stderr ./calc.at:1407: cat stderr +input: ./calc.at:1407: $PREPARSER ./calc /dev/null + | (#) + (#) = 2222 +./calc.at:1405: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -93882,7 +93651,11 @@ Now at end of input. syntax error Cleanup: discarding lookahead token "end of input" (1.1: ) +stderr: ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -93890,6 +93663,12 @@ Now at end of input. 
syntax error Cleanup: discarding lookahead token "end of input" (1.1: ) +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1405: cat stderr +stderr: +stdout: ./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -93900,11 +93679,29 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./types.at:139: ./check +input: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + | (1 + #) = 1111 +./calc.at:1405: $PREPARSER ./calc input +stderr: +./calc.at:1414: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +1.6: syntax error: invalid character: '#' +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1407: cat stderr +stderr: +1.6: syntax error: invalid character: '#' input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1407: $PREPARSER ./calc input +./calc.at:1405: cat stderr +input: +stderr: + | (# + 1) = 1111 +./calc.at:1405: $PREPARSER ./calc input stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -94411,10 +94208,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +1.2: syntax error: invalid character: '#' +stderr: ./calc.at:1407: cat stderr +stdout: +./calc.at:1408: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +./calc.at:1405: cat stderr input: | (!!) + (1 2) = 1 ./calc.at:1407: $PREPARSER ./calc input +input: +input: + | (1 + # + 1) = 1111 +./calc.at:1405: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -94528,6 +94345,24 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1408: $PREPARSER ./calc input +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 @@ -94640,6 +94475,848 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) +1.6: syntax error: invalid character: '#' +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 
3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Reading a token +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' 
(2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Reading a token +Next token is token "number" 
(5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Next token is token '=' (7.6: ) 
+Shifting token '=' (7.6: ) +Entering state 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + 
$2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting tstderr: +oken ')' (10.11: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 
6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Reading a token +Next token is 
token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (14.1: ) +Entering state 17 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: cat stderr ./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -94650,11 +95327,904 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stdout: +./calc.at:1409: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c calc.h + +input: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Reading a token +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + 
$3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Next token is token '\n' (4.10-5.0: ) 
+Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 
+Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' 
(9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 123): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 
8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Reading a token +Next token is token 
"number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 124): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (14.1: ) +Entering state 17 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) + | (1 + 1) / (1 - 1) +./calc.at:1405: $PREPARSER ./calc input +input: +stderr: +1.11-17: error: null divisor +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1407: cat stderr + | 1 2 +./calc.at:1408: $PREPARSER ./calc input input: +input: +stderr: +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1409: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor | (- *) + (1 2) = 1 ./calc.at:1407: $PREPARSER ./calc input stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -94772,6 +96342,22 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) stderr: Starting parse Entering state 0 @@ -94889,254 +96475,9 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1407: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1407: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 3333) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +514. 
calc.at:1405: ok stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 3333) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1407: "$PERL" -pi -e 'use strict; +./calc.at:1408: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -95146,175 +96487,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stdout: -./calc.at:1408: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - -./calc.at:1407: cat stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1408: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1407: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 122): - $1 = token '!' 
(1.1: ) - $2 = token '+' (1.1: ) -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 122): - $1 = token '!' (1.1: ) - $2 = token '+' (1.1: ) -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -input: -stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1407: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -96151,74 +97323,47 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1408: cat stderr +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +input: + | 1//2 +./calc.at:1408: $PREPARSER ./calc input stderr: +./calc.at:1407: cat stderr Starting parse Entering state 0 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 81): +Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 123): - $1 = token '!' (1.1: ) - $2 = token '-' (1.1: ) -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -97055,77 +98200,160 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 2 -stderr: -./calc.at:1408: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 81): +Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +input: + | (* *) + (*) + (*) +./calc.at:1407: $PREPARSER ./calc input +input: + | 1 2 +./calc.at:1409: $PREPARSER ./calc input +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token Next token is token '+' (1.1: ) Shifting token '+' (1.1: ) Entering state 21 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 30 +Reading a token Next token is token '+' (1.1: ) Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) + $1 = nterm exp (1.1: 1111) $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 Next token is token '+' (1.1: ) Shifting token '+' (1.1: ) Entering state 21 Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 123): - $1 = token '!' 
(1.1: ) - $2 = token '-' (1.1: ) -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -stderr: +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 3333) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -97141,7 +98369,124 @@ 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) 
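The sed and "$PERL" commands that recur throughout these hunks are the testsuite's output normalization: "profiling:...:Merge mismatch for summaries" noise is dropped from the captured stderr, and verbose "syntax error on token [...] (expected: ...)" diagnostics are rewritten to Bison's standard wording in the expected-output file before the comparison. A minimal stand-alone sketch of that post-processing, assuming stderr and expout are the captured and expected output files named in the calc.at lines above:

  # sketch only -- restates the recurring post-processing commands from the log
  sed -e '/^profiling:.*:Merge mismatch for summaries/d' stderr >&2
  perl -pi -e 'use strict;
    s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
    {
      my $unexp = $1;
      my @exps = $2 =~ /\[(.*?)\]/g;
      ($#exps && $#exps < 4)
        ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
        : "syntax error, unexpected $unexp";
    }eg' expout || exit 77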
+Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 3333) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1408: cat stderr stderr: Starting parse Entering state 0 @@ -97158,7 +98503,11 @@ 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1408: "$PERL" -pi -e 'use strict; +input: + | error +./calc.at:1408: $PREPARSER ./calc input +stderr: +./calc.at:1409: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -97168,6 +98517,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -97178,13 +98535,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1408: cat stderr -./calc.at:1407: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1409: cat stderr input: +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 | 1//2 -./calc.at:1408: $PREPARSER ./calc input -input: +./calc.at:1409: $PREPARSER ./calc input +./calc.at:1407: cat stderr +./calc.at:1408: cat stderr stderr: +input: Starting parse Entering state 0 Reading a token @@ -97205,10 +98579,36 @@ Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !* ++ +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ ./calc.at:1407: $PREPARSER ./calc input +input: +521. 
calc.at:1416: testing Calculator %glr-parser %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1416: mv calc.y.tmp calc.y + + | 1 = 2 = 3 stderr: +./calc.at:1408: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) stderr: Starting parse Entering state 0 @@ -97267,15 +98667,16 @@ Shifting token '!' (1.1: ) Entering state 5 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 124): +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 122): $1 = token '!' (1.1: ) - $2 = token '*' (1.1: ) -memory exhausted + $2 = token '+' (1.1: ) Cleanup: popping token '+' (1.1: ) Cleanup: popping nterm exp (1.1: 7) +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token @@ -97287,16 +98688,26 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -97355,57 +98766,16 @@ Shifting token '!' (1.1: ) Entering state 5 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 124): +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 122): $1 = token '!' (1.1: ) - $2 = token '*' (1.1: ) -memory exhausted + $2 = token '+' (1.1: ) Cleanup: popping token '+' (1.1: ) Cleanup: popping nterm exp (1.1: 7) -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1408: cat stderr -input: -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | error -./calc.at:1408: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1407: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -input: -./calc.at:1408: "$PERL" -pi -e 'use strict; +./calc.at:1409: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -97415,112 +98785,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (#) + (#) = 2222 -./calc.at:1407: $PREPARSER ./calc input -./calc.at:1408: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 
2222) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 = 2 = 3 -./calc.at:1408: $PREPARSER ./calc input -stderr: Starting parse Entering state 0 Reading a token @@ -97550,145 +98814,77 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1407: $PREPARSER ./calc input +./calc.at:1409: cat stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token Next token is token '+' (1.1: ) Shifting token '+' (1.1: ) Entering state 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 30 Reading a token -Next token is token '=' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = 
token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 123): + $1 = token '!' 
(1.1: ) + $2 = token '-' (1.1: ) +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1408: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -97699,95 +98895,99 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1408: cat stderr -./calc.at:1407: cat stderr -input: input: - | (1 + #) = 1111 -./calc.at:1407: $PREPARSER ./calc input - | - | +1 -./calc.at:1408: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 +Entering state 8 Reading a token Next token is token '+' (1.1: ) Shifting token '+' (1.1: ) Entering state 21 Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Error: popping token '+' (1.1: ) -Error: popping nterm exp (1.1: 1) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Now at end of input. 
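Each "$PREPARSER ./calc input" step seen above follows the same pattern: the lines printed after "input:" (displayed with a leading "| ") are the contents of a file named input, the freshly built calc binary parses that file with its debug trace going to stderr, and that trace is filtered and compared against the expected trace. A hedged sketch of one such step, assuming PREPARSER is the testsuite's wrapper variable (empty in a plain run) and that the trace is captured into a file called stderr, as the surrounding sed commands suggest:

  # sketch only -- not part of either build log
  cat >input <<'EOF'
  1 + 2 * 3 = 7
  1 + 2 * -3 = -5
  EOF
  $PREPARSER ./calc input 2>stderr
  sed -e '/^profiling:.*:Merge mismatch for summaries/d' stderr >&2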
-Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 123): + $1 = token '!' (1.1: ) + $2 = token '-' (1.1: ) +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1408: cat stderr + | error +./calc.at:1409: $PREPARSER ./calc input +stderr: +input: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) + | + | +1 +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1408: $PREPARSER ./calc input +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -97809,84 +99009,14 @@ Cleanup: discarding lookahead token '+' (2.1: ) ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Error: popping token '+' (1.1: ) -Error: popping nterm exp (1.1: 1) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1407: cat stderr +stderr: Starting parse Entering state 0 Reading a token @@ -97906,6 +99036,8 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) +input: + | 1 + 2 * 3 + !* ++ ./calc.at:1408: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -97916,7 +99048,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1407: "$PERL" -pi -e 'use strict; +./calc.at:1407: $PREPARSER ./calc input +./calc.at:1409: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -97926,8 +99059,144 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 124): + $1 = token '!' 
(1.1: ) + $2 = token '*' (1.1: ) +memory exhausted +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) ./calc.at:1408: cat stderr +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: cat stderr +stderr: ./calc.at:1408: $PREPARSER ./calc /dev/null +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 124): + $1 = token '!' (1.1: ) + $2 = token '*' (1.1: ) +memory exhausted +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) stderr: Starting parse Entering state 0 @@ -97936,17 +99205,47 @@ 1.1: syntax error, unexpected end of input Cleanup: discarding lookahead token "end of input" (1.1: ) ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1407: cat stderr +input: + | 1 = 2 = 3 stderr: +./calc.at:1409: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token Now at end of input. 
1.1: syntax error, unexpected end of input Cleanup: discarding lookahead token "end of input" (1.1: ) -input: - | (# + 1) = 1111 -./calc.at:1407: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1408: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -97957,152 +99256,49 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1408: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Reading a token Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1408: cat stderr input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1408: $PREPARSER ./calc input -./calc.at:1407: "$PERL" -pi -e 'use strict; +./calc.at:1409: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -98112,6 +99308,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1407: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1408: $PREPARSER ./calc input +./calc.at:1409: cat stderr +input: stderr: Starting parse Entering state 0 @@ -98360,8 +99561,15 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + | (#) + (#) = 2222 ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: $PREPARSER ./calc input +input: + | + | +1 +stderr: stderr: +./calc.at:1409: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -98609,21 +99817,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1407: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1407: $PREPARSER ./calc input -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: Starting parse Entering state 0 Reading a token @@ -98631,33 +99824,40 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 Reading a token Next token is token '+' (1.1: ) Shifting token '+' (1.1: ) Entering state 21 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token syntax error: invalid character: '#' Next token is token error (1.1: ) -Error: popping token '+' (1.1: ) -Error: popping nterm exp (1.1: 1) Shifting token error (1.1: ) Entering state 11 Next token is token error (1.1: ) Error: discarding token error (1.1: ) Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Reading a token Next token is token ')' (1.1: ) Entering state 11 Next token is token ')' (1.1: ) @@ -98668,32 +99868,39 @@ $2 = token error (1.1: ) $3 = 
token ')' (1.1: ) -> $$ = nterm exp (1.1: 1111) -Entering state 8 +Entering state 30 Reading a token Next token is token '=' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '=' (1.1: ) Shifting token '=' (1.1: ) Entering state 19 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 28 Reading a token Next token is token '\n' (1.1: ) Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) + $1 = nterm exp (1.1: 2222) $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) Entering state 25 Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) + $1 = nterm exp (1.1: 2222) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -98707,8 +99914,29 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) +stderr: ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1408: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 @@ -98717,33 +99945,40 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 Reading a token Next token is token '+' (1.1: ) Shifting token '+' (1.1: ) Entering state 21 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token syntax error: invalid character: '#' Next token is token error (1.1: ) -Error: popping token '+' (1.1: ) -Error: popping nterm exp (1.1: 1) Shifting token error (1.1: ) Entering state 11 Next token is token error (1.1: ) Error: discarding token error (1.1: ) Reading a token -Next token is token '+' (1.1: 
) -Error: discarding token '+' (1.1: ) -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Reading a token Next token is token ')' (1.1: ) Entering state 11 Next token is token ')' (1.1: ) @@ -98754,32 +99989,39 @@ $2 = token error (1.1: ) $3 = token ')' (1.1: ) -> $$ = nterm exp (1.1: 1111) -Entering state 8 +Entering state 30 Reading a token Next token is token '=' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '=' (1.1: ) Shifting token '=' (1.1: ) Entering state 19 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 28 Reading a token Next token is token '\n' (1.1: ) Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) + $1 = nterm exp (1.1: 2222) $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) Entering state 25 Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) + $1 = nterm exp (1.1: 2222) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -98793,9 +100035,63 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1408: cat stderr +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: +./calc.at:1409: cat stderr +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 | (!!) 
+ (1 2) = 1 ./calc.at:1408: $PREPARSER ./calc input +./calc.at:1409: $PREPARSER ./calc /dev/null +stderr: stderr: Starting parse Entering state 0 @@ -98908,18 +100204,22 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) Starting parse Entering state 0 Reading a token @@ -99031,6 +100331,18 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1407: cat stderr +input: +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1408: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -99041,13 +100353,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1408: cat stderr -./calc.at:1407: cat stderr -input: - | (1 + 1) / (1 - 1) + | (1 + #) = 1111 ./calc.at:1407: $PREPARSER ./calc input stderr: -input: +./calc.at:1408: cat stderr +./calc.at:1409: cat stderr Starting parse Entering state 0 Reading a token @@ -99067,89 +100377,51 @@ Shifting token '+' (1.1: ) Entering state 21 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Error: popping token '+' (1.1: ) +Error: popping nterm exp (1.1: 1) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) Reading a token Next token is token ')' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 +Entering state 11 Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 118): +Entering state 26 +Reducing stack 0 by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) + $2 = token error (1.1: ) $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) 
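The commands interleaved above for test group 521 (calc.at:1416) show how each calculator variant is built before its traces are compared: the generated grammar is moved into place, bison produces calc.c with color, caret diagnostics, and terminal hyperlinks disabled, and the parser is compiled together with the test's lexer and driver. Collected into one place, as a sketch that keeps the CC/CFLAGS/CPPFLAGS/LDFLAGS/LIBS variables the log leaves unexpanded:

  # sketch only -- the same commands as scattered through the hunks above
  mv calc.y.tmp calc.y
  COLUMNS=1000; export COLUMNS
  NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS
  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
  $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS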
-Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack 0 by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 Reading a token Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) Entering state 25 Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2) + $1 = nterm exp (1.1: 1111) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -99164,8 +100436,8 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1408: $PREPARSER ./calc input +input: +input: stderr: Starting parse Entering state 0 @@ -99186,89 +100458,51 @@ Shifting token '+' (1.1: ) Entering state 21 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Error: popping token '+' (1.1: ) +Error: popping nterm exp (1.1: 1) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) Reading a token Next token is token ')' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 +Entering state 11 Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 118): +Entering state 26 +Reducing stack 0 by rule 14 (line 119): $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) + $2 = token error (1.1: ) $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by 
rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack 0 by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 Reading a token Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) Entering state 25 Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2) + $1 = nterm exp (1.1: 1111) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -99282,6 +100516,10 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (- *) + (1 2) = 1 +./calc.at:1409: $PREPARSER ./calc input +./calc.at:1408: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -99399,7 +100637,256 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -99517,6 +101004,7 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: ./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -99527,6 +101015,253 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1408: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -99537,14 +101272,101 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1408: cat stderr +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1407: cat stderr +./calc.at:1408: cat stderr +./calc.at:1409: cat stderr input: -515. calc.at:1407: ok +input: +input: + | (# + 1) = 1111 +./calc.at:1407: $PREPARSER ./calc input | (* *) + (*) + (*) ./calc.at:1408: $PREPARSER ./calc input - + | (!!) 
+ (1 2) = 1 +./calc.at:1409: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) stderr: +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -99661,6 +101483,188 @@ Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 128): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) stderr: Starting parse Entering state 0 @@ -99777,6 +101781,118 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 128): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1408: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -99787,14 +101903,158 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1409: cat stderr ./calc.at:1408: cat stderr input: + | (- *) + (1 2) = 1 +./calc.at:1409: $PREPARSER ./calc input +input: +./calc.at:1407: cat stderr | 1 + 2 * 3 + !+ ++ ./calc.at:1408: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 127): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +Starting parse +Entering state 0 +Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 @@ -99858,6 +102118,126 @@ Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1407: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 127): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 
2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: stderr: Starting parse Entering state 0 @@ -99924,22 +102304,187 @@ $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -stderr: -stdout: -./calc.at:1413: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Error: popping token '+' (1.1: ) +Error: popping nterm exp (1.1: 1) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: | 1 + 2 * 3 + !- ++ +stderr: ./calc.at:1408: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Error: popping token '+' (1.1: ) +Error: popping nterm exp (1.1: 1) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) stderr: Starting parse Entering state 0 @@ -100006,23 +102551,9 @@ $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1409: cat stderr ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -523. calc.at:1431: testing Calculator C++ ... 
- | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1413: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -100089,10 +102620,7 @@ $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1431: mv calc.y.tmp calc.y - -./calc.at:1431: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1408: "$PERL" -pi -e 'use strict; +./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -100102,82 +102630,114 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (* *) + (*) + (*) +./calc.at:1409: $PREPARSER ./calc input +stderr: +stdout: stderr: -./calc.at:1408: cat stderr Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -100185,764 +102745,610 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 21 -Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 Reading a token -Next token is token 
'^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1407: cat stderr +./calc.at:1408: cat stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +======== Testing with C++ standard flags: '' +input: +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1408: $PREPARSER ./calc input +./calc.at:1409: cat stderr + | (1 + 1) / (1 - 1) +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1407: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input 
(1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 131): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) +Next token is token ')' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 
+Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) Entering state 20 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 29 Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 +Next token is token ')' (1.1: ) +Reducing stack 0 by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 
+Reducing stack 0 by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1409: $PREPARSER ./calc input +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 
12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 131): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 129): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token ')' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) +Reducing stack 0 by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) Entering state 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = 
token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 19 +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token ')' (1.1: ) +Reducing stack 0 by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (14.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1408: $PREPARSER ./calc input +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -101001,18 +103407,20 @@ Shifting token '!' 
(1.13: ) Entering state 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 131): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 129): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1408: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1409: $PREPARSER ./calc input stderr: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: Starting parse Entering state 0 Reading a token @@ -101070,16 +103478,16 @@ Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 131): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 130): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1408: "$PERL" -pi -e 'use strict; +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -101089,7 +103497,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1408: cat stderr + | (#) + (#) = 2222 +./calc.at:1408: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -101126,45 +103535,122 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 130): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) Entering state 19 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Reading a token -Next token is token '\n' (1.14-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-9: 2222) $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -101172,812 +103658,632 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 21 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token 
"number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 -Next token is token '\n' (2.16-3.0: ) +Reading a token +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1407: cat stderr +./calc.at:1409: cat stderr +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +515. 
calc.at:1407: ok +./calc.at:1408: cat stderr +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1409: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 131): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + #) = 1111 +./calc.at:1408: $PREPARSER ./calc input +stderr: +stderr: + +Starting parse +Entering state 0 Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Reading a token -Next token is token '\n' (5.11-6.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm 
exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Reducing stack 0 by rule 
4 (line 84): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 131): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 111): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 -Next token is token '\n' (9.15-10.0: ) +Reading a token +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' 
(1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1409: cat stderr +input: +./calc.at:1408: cat stderr + | (#) + (#) = 2222 +./calc.at:1409: $PREPARSER ./calc input +input: + | (# + 1) = 1111 +./calc.at:1408: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line 
(10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 
256) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Reading a token -Next token is token '\n' (12.12-13.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stdout: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 
-Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (14.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -stderr: -stdout: -./calc.at:1409: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - -input: -input: -./calc.at:1431: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS - | (#) + (#) = 2222 - | 1 2 -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1408: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: Starting parse Entering state 0 @@ -102076,27 +104382,447 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./types.at:139: ./check ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1409: cat stderr +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: cat stderr +input: +input: + | (1 + # + 1) = 1111 +./calc.at:1408: $PREPARSER ./calc input + | (1 + #) = 1111 ./calc.at:1409: $PREPARSER ./calc input stderr: +stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: -./calc.at:1413: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +522. calc.at:1426: testing Calculator lalr1.cc %header ... +./calc.at:1426: mv calc.y.tmp calc.y + +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -102106,6 +104832,133 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1426: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1409: cat stderr +./calc.at:1408: cat stderr +input: + | (# + 1) = 1111 +input: +./calc.at:1409: $PREPARSER ./calc input +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1408: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 108): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line 
(1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -102120,77 +104973,688 @@ Next token is token error (1.2: ) Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.3: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 108): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1426: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.9: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: cat stderr +======== Testing with C++ standard flags: '' +516. calc.at:1408: ok +./calc.at:1409: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +input: + | (1 + # + 1) = 1111 +./calc.at:1409: $PREPARSER ./calc input +stderr: + +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1409: cat stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1409: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 108): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 108): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -102203,6 +105667,63 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1409: cat stderr +517. calc.at:1409: ok +523. calc.at:1431: testing Calculator C++ ... +./calc.at:1431: mv calc.y.tmp calc.y + +./calc.at:1431: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: + +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +524. calc.at:1432: testing Calculator C++ %locations ... +./calc.at:1432: mv calc.y.tmp calc.y + +./calc.at:1432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1431: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1432: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +stderr: +stdout: +./calc.at:1411: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1411: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -102239,14 +105760,14 @@ Entering state 31 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -102321,20 +105842,20 @@ Entering state 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 31 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) -> $$ = nterm exp (2.5-10: -6) Entering state 30 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -102357,7 +105878,7 @@ Entering state 10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) @@ -102421,14 +105942,14 @@ Entering state 33 Reading a token Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) -> $$ = nterm exp (4.2-4: 1) Entering state 10 Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ 
-102450,7 +105971,7 @@ Entering state 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) @@ -102493,7 +106014,7 @@ Entering state 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -102501,7 +106022,7 @@ Next token is token ')' (5.4: ) Shifting token ')' (5.4: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) @@ -102521,7 +106042,7 @@ Entering state 33 Reading a token Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -102594,19 +106115,19 @@ Entering state 10 Reading a token Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -102628,7 +106149,7 @@ Entering state 10 Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) @@ -102688,7 +106209,7 @@ Entering state 29 Reading a token Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -102707,7 +106228,7 @@ Entering state 29 Reading a token Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -102730,7 +106251,7 @@ Entering state 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) @@ -102793,7 +106314,7 @@ Entering state 29 Reading a token Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -102802,7 +106323,7 @@ Next token is token ')' (10.11: ) Shifting token ')' (10.11: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) @@ -102810,7 +106331,7 @@ Entering state 29 Reading a token Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -102895,14 +106416,14 @@ Entering state 33 Reading a token Next token is token '=' 
(12.7: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) -> $$ = nterm exp (12.3-5: 8) Entering state 33 Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -102966,7 +106487,7 @@ Entering state 33 Reading a token Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -102975,7 +106496,7 @@ Next token is token ')' (13.5: ) Shifting token ')' (13.5: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) @@ -102995,7 +106516,7 @@ Entering state 33 Reading a token Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -103039,46 +106560,8 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1413: cat stderr -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1//2 -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1408: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -input: +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -103115,14 +106598,14 @@ Entering state 31 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -103197,20 +106680,20 @@ Entering state 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 31 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) -> $$ = nterm exp (2.5-10: -6) Entering state 30 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -103233,7 +106716,7 @@ Entering state 10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) @@ -103297,14 +106780,14 @@ Entering state 33 Reading a token Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) -> $$ = nterm exp (4.2-4: 1) Entering state 10 Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -103326,7 +106809,7 @@ Entering state 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) @@ -103369,7 +106852,7 @@ Entering state 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -103377,7 +106860,7 @@ Next token is token ')' (5.4: ) Shifting token ')' (5.4: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) @@ -103397,7 +106880,7 @@ Entering state 33 Reading a token Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -103470,19 +106953,19 @@ Entering state 10 Reading a token Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 
10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -103504,7 +106987,7 @@ Entering state 10 Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) @@ -103564,7 +107047,7 @@ Entering state 29 Reading a token Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -103583,7 +107066,7 @@ Entering state 29 Reading a token Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -103606,7 +107089,7 @@ Entering state 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 123): +Reducing stack 0 by rule 11 (line 111): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) @@ -103669,7 +107152,7 @@ Entering state 29 Reading a token Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -103678,7 +107161,7 @@ Next token is token ')' (10.11: ) Shifting token ')' (10.11: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) @@ -103686,7 +107169,7 @@ Entering state 29 Reading a token Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -103771,14 +107254,14 @@ Entering state 33 Reading a token Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) -> $$ = nterm exp (12.3-5: 8) Entering state 33 Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -103842,7 +107325,7 @@ Entering state 33 Reading a token Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -103851,7 +107334,7 @@ Next token is token ')' (13.5: ) Shifting token ')' (13.5: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) @@ -103871,7 +107354,7 @@ Entering state 33 Reading a token Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 124): +Reducing stack 0 by rule 12 (line 112): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -103915,10 +107398,57 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm 
input (1.1-14.0: ) - | (1 + #) = 1111 -./calc.at:1408: $PREPARSER ./calc input +input: + | 1 2 +./calc.at:1411: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1411: cat stderr input: + | 1//2 +./calc.at:1411: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -103939,74 +107469,156 @@ Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.3: ) - | 1 2 -./calc.at:1409: $PREPARSER ./calc input +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1411: cat stderr +input: + | error +./calc.at:1411: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1411: cat stderr +input: + | 1 = 2 = 3 +./calc.at:1411: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Reading a token 
+Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1411: cat stderr +input: + | + | +1 +./calc.at:1411: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -104014,14 +107626,32 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1413: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1411: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -104031,21 +107661,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1411: cat stderr +./calc.at:1411: $PREPARSER ./calc /dev/null +stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1411: cat stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1411: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -104054,64 +107700,234 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -104124,34 +107940,256 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: "$PERL" -pi -e 'use strict; +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -104161,16 +108199,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1413: cat stderr -./calc.at:1409: cat stderr -./calc.at:1408: cat stderr -input: - | error -input: - | (# + 1) = 1111 -./calc.at:1408: $PREPARSER ./calc input +./calc.at:1411: cat stderr input: -./calc.at:1413: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +./calc.at:1411: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -104179,56 +108211,98 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 Reading a token -Next token is token ')' (1.7: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -104241,17 +108315,133 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | 1//2 
+./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -stderr: +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1411: cat stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1411: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -104260,56 +108450,103 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.5: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 
1111) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -104322,50 +108559,125 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = 
token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -stderr: -stdout: -stderr: -./calc.at:1411: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -./calc.at:1408: "$PERL" -pi -e 'use strict; +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -104375,27 +108687,244 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1411: cat stderr +input: + | (* *) + (*) + (*) +./calc.at:1411: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1413: "$PERL" -pi -e 'use strict; +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -104405,11 +108934,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1408: cat stderr -./calc.at:1413: cat stderr +./calc.at:1411: cat stderr +stderr: +stdout: +./calc.at:1413: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1411: $PREPARSER ./calc input input: - | 1 = 2 = 3 +stderr: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -104423,19 +108966,7 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1413: $PREPARSER ./calc input -stderr: Starting parse Entering state 0 Reading a token @@ -104447,9 +108978,9 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 Reading a token Next token is token "number" (1.5: 2) Shifting token "number" (1.5: 2) @@ -104457,17 +108988,52 @@ Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 28 +Entering state 30 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: Starting parse Entering state 0 Reading a token @@ -104479,9 +109045,9 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 Reading a token Next token is token "number" (1.5: 2) Shifting token "number" (1.5: 2) @@ -104489,114 +109055,55 @@ Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) - | (1 + # + 1) = 1111 -./calc.at:1408: $PREPARSER ./calc input -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 +Entering state 30 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 
-Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: cat stderr +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1411: $PREPARSER ./calc input +stderr: stderr: -./calc.at:1413: cat stderr Starting parse Entering state 0 Reading a token @@ -105194,8 +109701,7 @@ -> $$ = nterm exp (10.6-10: -1) Entering state 12 Next token is token ')' (10.11: ) -Shifting tokstderr: -en ')' (10.11: ) +Shifting token ')' (10.11: ) Entering state 27 Reducing stack 0 by rule 13 (line 113): $1 = token '(' (10.5: ) @@ -105434,97 +109940,139 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + 
$1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next 
token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | - | +1 -input: - | error -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1413: $PREPARSER ./calc input +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) stderr: Starting parse Entering state 0 @@ -106362,36 +110910,10 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1408: "$PERL" -pi -e 'use strict; + | 1 2 +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1411: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -106401,32 +110923,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -./calc.at:1411: $PREPARSER ./calc input -stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: cat stderr stderr: -./calc.at:1408: cat stderr Starting parse Entering state 0 Reading a token @@ -106442,13 +110957,10 @@ 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token "number" (1.3: 2) -Starting parse -Entering state 0 -Reading a token -Next token is 
token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1411: $PREPARSER ./calc input +stderr: ./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -106459,8 +110971,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -stderr: Starting parse Entering state 0 Reading a token @@ -106472,298 +110982,135 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) - | (1 + 1) / (1 - 1) -./calc.at:1408: $PREPARSER ./calc input -stderr: -./calc.at:1413: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 
125): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 108): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1413: $PREPARSER ./calc /dev/null -stderr: -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1413: cat stderr Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 108): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = 
nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: cat stderr -stderr: -Starting parse -Entering state 0 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1411: cat stderr +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) input: -./calc.at:1408: cat stderr - | 1 = 2 = 3 -./calc.at:1413: "$PERL" -pi -e 'use strict; + | 1//2 +./calc.at:1413: $PREPARSER ./calc input +stderr: +./calc.at:1411: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -106773,45 +111120,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1409: $PREPARSER ./calc input -input: -stderr: -516. 
calc.at:1408: ok - | 1//2 -./calc.at:1411: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1413: cat stderr -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: Starting parse Entering state 0 Reading a token @@ -106832,37 +111140,8 @@ Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -input: +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: cat stderr stderr: Starting parse Entering state 0 @@ -106884,20 +111163,10 @@ Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.3: ) - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1411: "$PERL" -pi -e 'use strict; +input: + | (#) + (#) = 2222 +./calc.at:1411: $PREPARSER ./calc input +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -106907,7 +111176,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1409: cat stderr stderr: Starting parse Entering state 0 @@ -106916,484 +111184,84 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) Shifting token error (1.2: ) Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: cat stderr -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) +Next token is token ')' (1.3: ) Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 21 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) Reading a token -Next token is token ')' (1.42: ) +Next token is token ')' (1.9: ) Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 30 Reading a token -Next token is token '=' (1.44: ) +Next token is token '=' (1.11: ) Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -107406,113 +111274,8 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | error -./calc.at:1411: $PREPARSER ./calc input -input: -stderr: - | - | +1 -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1413: cat stderr -./calc.at:1409: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1409: $PREPARSER ./calc /dev/null -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -107521,90 +111284,76 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 21 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) Reading a token -Next token is token ')' (1.12: ) +Next token is token ')' (1.9: ) Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 30 Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' (1.11: ) Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Reading a token Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) -> $$ = nterm exp (1.1-16: 2222) Entering state 8 Next token is token '\n' (1.17-2.0: ) @@ -107625,19 +111374,23 @@ Entering state 17 Cleanup: 
popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: cat stderr stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) +stdout: input: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1409: "$PERL" -pi -e 'use strict; +./calc.at:1414: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + + | error +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1411: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -107648,192 +111401,41 @@ }eg ' expout || exit 77 stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' 
(1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1411: cat stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1414: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1409: cat stderr -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1413: cat stderr +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) input: -./calc.at:1411: "$PERL" -pi -e 'use strict; + | (1 + #) = 1111 +./calc.at:1411: $PREPARSER ./calc input +stderr: +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -107843,266 +111445,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1409: $PREPARSER ./calc input -input: - | (- *) + (1 2) = 1 -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1411: cat stderr -stderr: -stderr: -stdout: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -524. calc.at:1432: testing Calculator C++ %locations ... -./calc.at:1432: mv calc.y.tmp calc.y - Starting parse Entering state 0 Reading a token @@ -108110,486 +111452,64 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - -input: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: - | - | +1 -./calc.at:1411: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.5: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line 
(1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -108602,101 +111522,9 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) ./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1414: $PREPARSER ./calc input -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: cat stderr -./calc.at:1413: cat stderr -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1411: cat stderr - | (!!) + (1 2) = 1 -./calc.at:1409: $PREPARSER ./calc input stderr: -./calc.at:1411: $PREPARSER ./calc /dev/null -input: Starting parse Entering state 0 Reading a token @@ -109533,16 +112361,7 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -stderr: - | (* *) + (*) + (*) -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1413: $PREPARSER ./calc input +./calc.at:1413: cat stderr Starting parse Entering state 0 Reading a token @@ -109550,453 +112369,64 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' 
(1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 128): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.17: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 128): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -110009,42 +112439,9 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1411: cat stderr -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: cat stderr -./calc.at:1413: cat stderr +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: input: -input: Starting parse Entering state 0 Reading a token @@ -110881,15 +113278,104 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1411: $PREPARSER ./calc input - | (- *) + (1 2) = 1 +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 = 2 = 3 input: -./calc.at:1409: $PREPARSER ./calc input +./calc.at:1413: $PREPARSER ./calc input | 1 2 ./calc.at:1414: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +stderr: +./calc.at:1411: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: stderr: +stderr: + | (# + 1) = 1111 +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1411: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -110905,8 +113391,6 @@ 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token "number" (1.3: 2) - | 1 + 2 * 3 + !+ ++ -./calc.at:1413: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -110915,102 +113399,636 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 127): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 Reading a token -Next token is token ')' (1.5: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1414: cat stderr +./calc.at:1413: cat stderr +stderr: +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stdout: + | 1//2 +input: +./types.at:139: $PREPARSER ./test +./calc.at:1414: $PREPARSER ./calc input + | + | +1 +./calc.at:1413: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1411: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +======== Testing with C++ standard flags: '' +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +stderr: + | (1 + # + 1) = 1111 +./calc.at:1411: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.13: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1413: cat stderr +./calc.at:1413: $PREPARSER ./calc /dev/null +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +input: +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +input: + | (1 + 1) / (1 - 1) +./calc.at:1411: $PREPARSER ./calc input +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1413: cat stderr +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-17: 2) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -111024,8 +114042,128 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +./calc.at:1414: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1413: $PREPARSER ./calc input 
+Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: stderr: Starting parse Entering state 0 @@ -111274,7 +114412,19 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1414: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -111287,9 +114437,9 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 Reading a token Next token is token "number" (1.5: 2) Shifting token "number" (1.5: 2) @@ -111297,67 +114447,15 @@ Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 +Entering state 28 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -111606,134 +114704,8 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 127): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 
1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: cat stderr stderr: -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -111745,9 +114717,9 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 Reading a token Next token is token "number" (1.5: 2) Shifting token "number" (1.5: 2) @@ -111755,53 +114727,15 @@ Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Entering state 28 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1411: "$PERL" -pi -e 'use strict; +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +518. 
calc.at:1411: ./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -111811,8 +114745,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1414: cat stderr -./calc.at:1409: "$PERL" -pi -e 'use strict; + ok +./calc.at:1413: cat stderr +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -111822,194 +114757,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1413: $PREPARSER ./calc input -stderr: -./calc.at:1411: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: - | 1//2 -./calc.at:1414: $PREPARSER ./calc input -./calc.at:1432: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1409: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) input: | (!!) 
+ (1 2) = 1 -./calc.at:1411: $PREPARSER ./calc input stderr: -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1414: cat stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -input: +stdout: + stderr: +stdout: +./types.at:139: ./check Starting parse Entering state 0 Reading a token @@ -112121,22 +114879,15 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test +input: +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + | + | +1 +./calc.at:1414: $PREPARSER ./calc input stderr: - | (* *) + (*) + (*) -./calc.at:1409: $PREPARSER ./calc input -stdout: stderr: -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -112248,18 +114999,30 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -./calc.at:1414: "$PERL" -pi -e 'use strict; +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -112270,9 +115033,42 @@ }eg ' expout || exit 77 stderr: -./calc.at:1414: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./calc.at:1413: cat stderr +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: +./calc.at:1414: cat stderr + | (- *) + (1 2) = 1 +./calc.at:1413: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -112280,14 +115076,20 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) Error: discarding token '*' (1.4: ) Reading a token Next token is token ')' (1.5: ) @@ -112295,7 +115097,782 @@ Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1414: $PREPARSER ./calc /dev/null +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1413: cat stderr +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +525. calc.at:1433: testing Calculator C++ %locations $NO_EXCEPTIONS_CXXFLAGS ... +./calc.at:1433: mv calc.y.tmp calc.y + +./calc.at:1414: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1433: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1414: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +./calc.at:1413: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -112322,7 +115899,7 @@ Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) @@ -112330,7 +115907,7 @@ Entering state 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) @@ -112356,7 +115933,7 @@ Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) @@ -112364,7 +115941,7 @@ Entering state 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) @@ -112388,22 +115965,381 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1416: $PREPARSER ./calc input -./calc.at:1411: "$PERL" -pi -e 'use strict; +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +stderr: + | (!!) + (1 2) = 1 +./calc.at:1414: $PREPARSER ./calc input +stdout: +./types.at:139: ./check +stderr: +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1413: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -112413,12 +116349,613 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1414: cat stderr +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) input: + | 1 + 2 * 3 + !- ++ +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1433: $CXX $CPPFLAGS $CXXFLAGS $NO_EXCEPTIONS_CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +stderr: input: - | error + | (- *) + (1 2) = 1 +./calc.at:1414: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: cat stderr +./calc.at:1413: cat stderr +input: +input: + | (* *) + (*) + (*) +./calc.at:1414: $PREPARSER ./calc input | 1 + 2 * 3 + !* ++ ./calc.at:1413: $PREPARSER ./calc input stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -112441,7 +116978,7 @@ Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -112468,7 +117005,7 @@ Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) @@ -112476,7 +117013,7 @@ Entering state 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) @@ -112502,7 +117039,7 @@ Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) @@ -112510,7 +117047,7 @@ Entering state 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) @@ -112534,9 +117071,8 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: cat stderr +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1414: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -112607,13 +117143,963 @@ Starting parse Entering state 0 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: cat stderr +./calc.at:1413: cat stderr +input: +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1414: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1413: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | 1 + 2 * 3 + !- ++ +./calc.at:1414: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1409: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1413: cat stderr +input: +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + #) = 1111 +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1414: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1414: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stdout: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./calc.at:1416: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c calc.h + +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: cat stderr +======== Testing with C++ standard flags: '' +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -112623,6 +118109,200 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1416: $PREPARSER ./calc input +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1414: cat stderr +input: + | (# + 1) = 1111 +./calc.at:1413: $PREPARSER ./calc input +stderr: +input: + | (#) + (#) = 2222 +./calc.at:1414: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token @@ -113459,106 +119139,78 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -input: stderr: stderr: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -Starting parse -Entering state 0 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1409: cat stderr -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - | (- *) + (1 2) = 1 -./calc.at:1411: $PREPARSER ./calc input +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: Starting parse Entering state 0 @@ -113859,7 +119511,103 @@ Shifting token ')' (5.4: ) Entering state 27 Reducing stack 0 by rule 13 (line 113): - $1 = token '('input: + $1 = token '('Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) 
+Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) @@ -114398,130 +120146,18 @@ Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1409: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) 
-Shifting token '=' (1.15: ) -Entering state 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 2 ./calc.at:1416: $PREPARSER ./calc input -./calc.at:1414: cat stderr -./calc.at:1413: cat stderr -stderr: +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -114538,8 +120174,9 @@ 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: +./calc.at:1413: cat stderr Starting parse Entering state 0 Reading a token @@ -114551,61 +120188,36 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 129): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: cat stderr +input: +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + #) = 1111 +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1416: cat stderr +stderr: Starting parse Entering state 0 Reading a token @@ -114613,103 +120225,64 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.13: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token 
error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -114722,43 +120295,90 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 -./calc.at:1414: $PREPARSER ./calc input +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: stderr: + | 1//2 +./calc.at:1416: $PREPARSER ./calc input +input: stderr: - | (#) + (#) = 2222 -./calc.at:1413: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '=' (1.3: ) 
-Shifting token '=' (1.3: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Reading a token @@ -114770,173 +120390,90 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1413: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 129): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) Reading a token -Next token is token ')' (1.3: ) +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 
= token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -114949,7 +120486,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: stderr: Starting parse Entering state 0 @@ -114962,181 +120498,99 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1411: cat stderr +Cleanup: discarding lookahead token '/' (1.3: ) ./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1409: $PREPARSER ./calc input +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: cat stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 130): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) Reading a token -Next token is token ')' (1.3: ) +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) 
Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -115149,7 +120603,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: ./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -115160,10 +120613,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: - | (* *) + (*) + (*) -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1414: "$PERL" -pi -e 'use strict; +input: + | (# + 1) = 1111 +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -115173,71 +120626,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 130): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1416: cat stderr stderr: Starting parse Entering state 0 @@ -115246,350 +120635,20 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) Shifting token error (1.2: ) Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: cat stderr -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: cat stderr -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1413: cat stderr -stderr: - | - | +1 -./calc.at:1414: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1409: cat stderr -stderr: - | 1//2 -./calc.at:1416: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -input: - | (1 + #) = 1111 -stderr: -./calc.at:1413: $PREPARSER ./calc input -input: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !* ++ -./calc.at:1409: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) Entering state 11 Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) @@ -115638,125 +120697,36 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stdout: +./types.at:139: ./check +input: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1413: cat stderr +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 131): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1411: cat stderr -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1416: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -Starting parse -Entering state 0 -Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 +Error: discarding token '+' (1.4: ) Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) Reading a token Next token is token ')' (1.7: ) Entering state 11 @@ -115807,338 +120777,120 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -stderr: input: -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | (1 + 1) / (1 - 1) +./calc.at:1413: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 131): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | 1 + 2 * 3 + !+ ++ -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1413: cat stderr -./calc.at:1416: cat stderr +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) stderr: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) +Next token is token ')' (1.7: ) Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1409: cat stderr -input: -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -./calc.at:1413: $PREPARSER ./calc input -input: -stderr: - | error -./calc.at:1416: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -116152,10 +120904,6 @@ Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: -input: -./calc.at:1414: cat stderr - | (#) + (#) = 2222 -./calc.at:1409: $PREPARSER ./calc input ./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 @@ -116163,12 +120911,7 @@ Next token is token "invalid token" (1.1: ) 1.1: syntax error, unexpected invalid token Cleanup: discarding lookahead token "invalid token" (1.1: ) - | 1 + 2 * 3 + !- ++ -./calc.at:1411: $PREPARSER ./calc input stderr: -stderr: -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: $PREPARSER ./calc /dev/null Starting parse Entering state 0 Reading a token @@ -116176,153 +120919,102 @@ Shifting token '(' (1.1: ) 
Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 Reading a token Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 Reading a token Next token is token ')' (1.7: ) -Entering state 11 +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 27 +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) + $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 
= nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -116335,248 +121027,104 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: cat stderr +./calc.at:1416: cat stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1414: $PREPARSER ./calc input +input: stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -Starting parse -Entering state 0 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) Reading a token -Next token is token ')' (1.3: ) +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 
2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -116599,66 +121147,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1413: cat stderr -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1411: cat stderr -./calc.at:1416: cat stderr -./calc.at:1414: cat stderr -input: -./calc.at:1409: cat stderr - | (1 + # + 1) = 1111 -./calc.at:1413: $PREPARSER ./calc input -input: -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1411: $PREPARSER ./calc input -input: | 1 = 2 = 3 ./calc.at:1416: $PREPARSER ./calc input -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1414: $PREPARSER ./calc input -stderr: -stderr: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -116671,9 +121162,9 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 Reading a token Next token is token "number" (1.5: 2) Shifting token "number" (1.5: 2) @@ -116681,54 +121172,16 @@ Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 +Entering state 28 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1409: $PREPARSER ./calc input +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1413: cat stderr +stderr: Starting parse Entering state 0 Reading a token @@ -116812,6 +121265,12 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +519. calc.at:1413: stderr: +./types.at:139: $PREPARSER ./test + ok Starting parse Entering state 0 Reading a token @@ -116841,9 +121300,155 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: cat stderr +======== Testing with C++ standard flags: '' +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | (1 + 1) / (1 - 1) +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1416: cat stderr stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +input: stderr: Starting parse Entering state 0 @@ -116864,52 +121469,90 @@ Shifting token '+' (1.4: ) Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 Reading a token Next token is token ')' (1.7: ) -Entering state 11 +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Entering state 27 +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) + $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next 
token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -116922,6 +121565,113 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + | + | +1 +./calc.at:1416: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1414: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +520. calc.at:1414: ok +./calc.at:1416: cat stderr +./calc.at:1416: $PREPARSER ./calc /dev/null +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) + +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +526. calc.at:1434: testing Calculator C++ %locations api.location.type={Span} ... +stderr: +./calc.at:1434: mv calc.y.tmp calc.y + +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +./calc.at:1434: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +stdout: +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1416: cat stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1416: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -117170,306 +121920,6 @@ Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -stderr: -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: Starting parse Entering state 0 @@ -117718,24 +122168,126 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +527. calc.at:1435: testing Calculator C++ %header %locations parse.error=verbose %name-prefix "calc" %verbose ... +./calc.at:1435: mv calc.y.tmp calc.y + +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1435: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y ./calc.at:1416: cat stderr -./calc.at:1411: cat stderr -input: - | - | +1 -./calc.at:1413: cat stderr input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + | (!!) + (1 2) = 1 ./calc.at:1416: $PREPARSER ./calc input -./calc.at:1409: cat stderr +./calc.at:1434: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS stderr: Starting parse Entering state 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -117743,12 +122295,125 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: "$PERL" -pi -e 'use strict; +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -117758,17 +122423,119 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./calc.at:1416: cat stderr +stdout: +./types.at:139: $PREPARSER ./test +stderr: input: + | (- *) + (1 2) = 1 +./calc.at:1416: $PREPARSER ./calc input stderr: - | (# + 1) = 1111 Starting parse Entering state 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 
0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -117776,18 +122543,15 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) - | (#) + (#) = 2222 -./calc.at:1411: $PREPARSER ./calc input -input: - | (1 + 1) / (1 - 1) -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1409: $PREPARSER ./calc input +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' stderr: -./calc.at:1414: cat stderr Starting parse Entering state 0 Reading a token @@ -117795,101 +122559,234 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 30 Reading a token -Next token is token ')' (1.7: ) +Next token is token '=' (1.15: ) Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ 
= nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1435: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1416: cat stderr +input: + | (* *) + (*) + (*) +./calc.at:1416: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) Reading a token Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 +Entering state 11 Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) + $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -117903,8 +122800,8 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -117912,56 +122809,102 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error (1.2: ) Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.5: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -117974,6 +122917,464 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1416: cat stderr +stderr: +input: +stdout: + | 1 + 2 * 3 + !+ ++ +./types.at:139: ./check +./calc.at:1416: $PREPARSER ./calc input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: $PREPARSER ./test +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1416: $PREPARSER ./calc input +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +======== Testing with C++ standard flags: '' +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1416: cat stderr +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1416: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1416: cat stderr +input: + | (#) + (#) = 2222 +./calc.at:1416: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -118072,21 +123473,7 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: -stderr: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -118185,6 +123572,25 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1416: cat stderr +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +input: + | (1 + #) = 1111 +./calc.at:1416: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -118204,90 +123610,52 @@ Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 +Entering state 11 Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Entering state 26 +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) + $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp 
(1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -118300,6 +123668,8 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token @@ -118307,25 +123677,33 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token Next token is token ')' (1.7: ) Entering state 11 Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) @@ -118369,21 +123747,7 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (!!) + (1 2) = 1 -./calc.at:1414: $PREPARSER ./calc input -./calc.at:1416: cat stderr -stderr: -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: "$PERL" -pi -e 'use strict; +./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -118393,7 +123757,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1416: $PREPARSER ./calc /dev/null +stderr: +stdout: +./calc.at:1431: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1416: cat stderr +input: + | (# + 1) = 1111 +input: +./calc.at:1416: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1431: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -118401,98 +123797,56 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) Reading a token -Next token is token ')' (1.12: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Reading a 
token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -118505,29 +123859,10 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1409: cat stderr -./calc.at:1411: cat stderr -./calc.at:1413: cat stderr stderr: +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token @@ -118535,98 +123870,56 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) Reading a token -Next token is token ')' (1.12: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -118639,21 +123932,29 @@ Entering state 17 Cleanup: popping token "end of 
input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1416: cat stderr input: -input: + | 1 2 +./calc.at:1431: $PREPARSER ./calc input stderr: - | (1 + # + 1) = 1111 -./calc.at:1409: $PREPARSER ./calc input - | (1 + #) = 1111 -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -519. calc.at:1413: ok -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1414: "$PERL" -pi -e 'use strict; +syntax error +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -118663,7 +123964,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1431: cat stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1416: $PREPARSER ./calc input +input: + | 1//2 +./calc.at:1431: $PREPARSER ./calc input stderr: +stderr: +syntax error Starting parse Entering state 0 Reading a token @@ -118703,7 +124013,7 @@ Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) @@ -118747,86 +124057,10 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 
28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error stderr: Starting parse Entering state 0 @@ -118867,7 +124101,7 @@ Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) @@ -118911,7 +124145,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: cat stderr ./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -118922,7 +124155,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1416: cat stderr +./calc.at:1431: cat stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1416: $PREPARSER ./calc input +input: + | error stderr: +./calc.at:1431: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -118942,52 +124193,90 @@ Shifting token '+' (1.4: ) Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 Reading a token Next token is token ')' (1.7: ) -Entering state 11 +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Entering state 27 +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) + $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' 
(1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -119000,22 +124289,10 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1416: cat stderr -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - - | (- *) + (1 2) = 1 -./calc.at:1414: $PREPARSER ./calc input -./calc.at:1409: cat stderr +stderr: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -119024,273 +124301,20 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 
-Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1411: cat stderr -stderr: -./calc.at:1409: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1416: $PREPARSER ./calc input -input: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.6: 1) @@ -119298,7 +124322,7 @@ Entering state 30 Reading a token Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) $3 = nterm exp (1.6: 1) @@ -119307,7 +124331,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (1.1: ) $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) @@ -119343,7 +124367,7 @@ Entering state 29 Reading a token Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) @@ -119352,7 +124376,7 @@ Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (1.11: ) $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) @@ -119360,7 +124384,7 @@ Entering state 32 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 108): +Reducing stack 0 by rule 10 (line 102): $1 = nterm exp (1.1-7: 2) $2 = token '/' (1.9: ) $3 = nterm exp (1.11-17: 0) @@ -119385,10 +124409,19 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (# + 1) = 1111 -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1414: "$PERL" -pi -e 'use strict; +stderr: +syntax error +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -119398,445 +124431,55 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1416: cat stderr +521. 
calc.at:1416: ok +./calc.at:1431: cat stderr +input: + | 1 = 2 = 3 +./calc.at:1431: $PREPARSER ./calc input + stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 108): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +syntax error +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: cat stderr +syntax error +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1431: cat stderr +input: + | + | +1 +./calc.at:1431: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1409: "$PERL" -pi -e 'use strict; +syntax error +stderr: +stdout: +./calc.at:1432: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +528. calc.at:1437: testing Calculator C++ %locations parse.error=verbose api.prefix={calc} %verbose ... 
+./calc.at:1437: mv calc.y.tmp calc.y + +./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -119846,82 +124489,40 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y input: - | (* *) + (*) + (*) + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1432: $PREPARSER ./calc input stderr: -./calc.at:1414: $PREPARSER ./calc input -./calc.at:1409: cat stderr +./calc.at:1431: cat stderr +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: $PREPARSER ./calc /dev/null stderr: -./calc.at:1411: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +input: + | 1 2 +./calc.at:1432: $PREPARSER ./calc input +stderr: +1.3: syntax error +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -119931,488 +124532,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next 
token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -517. calc.at:1409: ok -./calc.at:1411: cat stderr -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: "$PERL" -pi -e 'use strict; +1.3: syntax error +./calc.at:1431: cat stderr +./calc.at:1432: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -120423,94 +124546,23 @@ }eg ' expout || exit 77 input: - | (1 + # + 1) = 1111 -./calc.at:1411: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1431: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: "$PERL" -pi -e 'use strict; +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1432: cat stderr +./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -120520,213 +124572,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1416: cat stderr - +input: + | 1//2 +./calc.at:1432: $PREPARSER ./calc input +./calc.at:1437: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1431: cat stderr stderr: +1.3: syntax error +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1414: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -525. 
calc.at:1433: testing Calculator C++ %locations $NO_EXCEPTIONS_CXXFLAGS ... +stderr: | (!!) + (1 2) = 1 -./calc.at:1416: $PREPARSER ./calc input -./calc.at:1433: mv calc.y.tmp calc.y - -./calc.at:1433: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1431: $PREPARSER ./calc input +1.3: syntax error stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: "$PERL" -pi -e 'use strict; +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +error: 2222 != 1 +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +error: 2222 != 1 +./calc.at:1432: cat stderr +./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -120736,190 +124613,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: + | error +./calc.at:1432: $PREPARSER ./calc input +./calc.at:1431: cat stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) 
-1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1414: $PREPARSER ./calc input +1.1: syntax error +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1431: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1416: "$PERL" -pi -e 'use strict; +syntax error +syntax error +error: 2222 != 1 +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +syntax error +error: 2222 != 1 +stderr: +1.1: syntax error +./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -120929,685 +124644,94 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1416: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1431: cat stderr +./calc.at:1432: cat stderr input: + | (* *) + (*) + (*) +./calc.at:1431: $PREPARSER ./calc input input: +stderr: + | 1 = 2 = 3 +./calc.at:1432: $PREPARSER ./calc input +stderr: +syntax error +syntax error +syntax error +1.7: syntax error +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +syntax error +syntax error +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1431: cat stderr input: - | 1 + 2 * 3 + !- ++ -./calc.at:1414: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) -./calc.at:1411: $PREPARSER ./calc input - | (- *) + (1 2) = 1 -./calc.at:1416: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1431: $PREPARSER ./calc input stderr: +1.7: syntax error +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1431: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: cat stderr stderr: +input: +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | + | +1 +./calc.at:1432: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' 
(1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1431: cat stderr +2.1: syntax error +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: "$PERL" -pi -e 'use strict; +2.1: syntax error +./calc.at:1432: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -121617,7 +124741,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1414: "$PERL" -pi -e 'use strict; +./calc.at:1432: cat stderr +./calc.at:1432: $PREPARSER ./calc /dev/null +input: + | (#) + (#) = 2222 +./calc.at:1431: $PREPARSER ./calc input +stderr: +1.1: syntax error +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +1.1: syntax error +./calc.at:1432: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -121627,8 +124768,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -526. calc.at:1434: testing Calculator C++ %locations api.location.type={Span} ... -./calc.at:1411: "$PERL" -pi -e 'use strict; +./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -121638,389 +124778,72 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1434: mv calc.y.tmp calc.y - -./calc.at:1416: cat stderr -./calc.at:1411: cat stderr -./calc.at:1434: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1414: cat stderr +./calc.at:1431: cat stderr input: - | (* *) + (*) + (*) +./calc.at:1432: cat stderr + | (1 + #) = 1111 +./calc.at:1431: $PREPARSER ./calc input +stderr: input: - | 1 + 2 * 3 + !* ++ -518. 
calc.at:1411: ok -./calc.at:1416: $PREPARSER ./calc input -./calc.at:1414: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1432: $PREPARSER ./calc input stderr: +syntax error: invalid character: '#' stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1432: cat stderr +./calc.at:1431: cat stderr +input: + | (!!) + (1 2) = 1 +./calc.at:1432: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - -./calc.at:1414: "$PERL" -pi -e 'use strict; +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +input: + | (# + 1) = 1111 +./calc.at:1431: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1432: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -122030,7 +124853,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1416: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +./calc.at:1432: cat stderr +./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -122040,482 +124865,127 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1414: cat stderr -./calc.at:1416: cat stderr -./calc.at:1433: $CXX $CPPFLAGS $CXXFLAGS $NO_EXCEPTIONS_CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS input: + | (- *) + (1 2) = 1 +./calc.at:1432: $PREPARSER ./calc input +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: cat stderr +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1432: cat stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1431: $PREPARSER ./calc input +stderr: +input: +syntax error: invalid character: '#' +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +./calc.at:1432: $PREPARSER ./calc input +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +stderr: +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1431: cat stderr +./calc.at:1432: cat stderr input: | 1 + 2 * 3 + !+ ++ -./calc.at:1416: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1414: $PREPARSER ./calc input +./calc.at:1432: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1431: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr +error: null divisor +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: null divisor +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 + !- ++ -./calc.at:1416: $PREPARSER ./calc input +./calc.at:1432: $PREPARSER ./calc input stderr: +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1432: cat stderr +./calc.at:1431: cat stderr +523. 
calc.at:1431: ok +input: + | (#) + (#) = 2222 +./calc.at:1432: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token 1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token 1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) 
-Entering state 19 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1414: "$PERL" -pi -e 'use strict; +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1432: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -122525,8 +124995,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1414: cat stderr -./calc.at:1416: "$PERL" -pi -e 'use strict; +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./calc.at:1432: cat stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + + | (1 + #) = 1111 +./calc.at:1432: $PREPARSER ./calc input +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1432: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -122536,310 +125020,78 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1432: cat stderr input: -./calc.at:1416: cat stderr - | (1 + #) = 1111 -./calc.at:1414: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1432: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +439. types.at:139: ok +./calc.at:1432: cat stderr +529. calc.at:1438: testing Calculator C++ %locations parse.error=verbose %debug %name-prefix "calc" %verbose ... 
+./calc.at:1438: mv calc.y.tmp calc.y + +./calc.at:1438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y + +input: + | (1 + # + 1) = 1111 +./calc.at:1432: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token 1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token 1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1432: cat stderr +stderr: +530. calc.at:1440: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} %verbose ... +./calc.at:1440: mv calc.y.tmp calc.y + +stdout: +./calc.at:1440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1433: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + input: - | 1 + 2 * 3 + !* ++ -./calc.at:1416: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1432: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +1.11-17: error: null divisor +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -527. calc.at:1435: testing Calculator C++ %header %locations parse.error=verbose %name-prefix "calc" %verbose ... -./calc.at:1435: mv calc.y.tmp calc.y - -./calc.at:1414: "$PERL" -pi -e 'use strict; +1.11-17: error: null divisor +input: +./calc.at:1432: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -122849,7 +125101,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1416: "$PERL" -pi -e 'use strict; +./calc.at:1438: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1432: cat stderr +stderr: +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1433: $EGREP -c -v 'Return for a new token:|LAC:' stderr +524. 
calc.at:1432: ok +input: + | 1 2 +./calc.at:1433: $PREPARSER ./calc input +stderr: +1.3: syntax error +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +stderr: +1.3: syntax error +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -122859,1554 +125141,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1414: cat stderr -./calc.at:1435: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1416: cat stderr +./calc.at:1433: cat stderr input: + | 1//2 +./calc.at:1433: $PREPARSER ./calc input +stderr: +1.3: syntax error +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error +./calc.at:1440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1433: cat stderr input: - | (#) + (#) = 2222 -./calc.at:1416: $PREPARSER ./calc input - | (# + 1) = 1111 + | error +./calc.at:1433: $PREPARSER ./calc input stderr: -./calc.at:1414: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 
2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1416: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | (1 + #) = 1111 -./calc.at:1416: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | (1 + # + 1) = 1111 -./calc.at:1414: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: cat stderr -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (# + 1) = 1111 -./calc.at:1416: $PREPARSER ./calc input -./calc.at:1414: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./calc.at:1414: $PREPARSER ./calc input -stderr: -./calc.at:1435: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 
-Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1416: cat stderr -./calc.at:1414: cat stderr -input: -520. calc.at:1414: ok - | (1 + # + 1) = 1111 -./calc.at:1416: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +1.1: syntax error +531. calc.at:1441: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} api.token.prefix={TOK_} %verbose ... 
+./calc.at:1441: mv calc.y.tmp calc.y -./calc.at:1416: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1416: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: "$PERL" -pi -e 'use strict; +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124416,22 +125183,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1416: cat stderr -521. calc.at:1416: ok -528. calc.at:1437: testing Calculator C++ %locations parse.error=verbose api.prefix={calc} %verbose ... -./calc.at:1437: mv calc.y.tmp calc.y - -./calc.at:1437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y - -./calc.at:1437: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -529. calc.at:1438: testing Calculator C++ %locations parse.error=verbose %debug %name-prefix "calc" %verbose ... -./calc.at:1438: mv calc.y.tmp calc.y - -./calc.at:1438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1438: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1441: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1433: cat stderr stderr: stdout: -./calc.at:1431: "$PERL" -ne ' +./calc.at:1426: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -124440,248 +125196,18 @@ || /\s$/ # No tabs. || /\t/ - )' calc.cc + )' calc.cc calc.hh input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1431: $PREPARSER ./calc input -stderr: -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | 1 2 -./calc.at:1431: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: cat stderr -input: - | 1//2 -./calc.at:1431: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: cat stderr -input: - | error -./calc.at:1431: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: cat stderr -input: | 1 = 2 = 3 -./calc.at:1431: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: cat stderr -input: - | - | +1 -./calc.at:1431: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: cat stderr -./calc.at:1431: $PREPARSER ./calc /dev/null -stderr: -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1431: $PREPARSER ./calc input -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1431: $PREPARSER ./calc input -stderr: -syntax error -error: 2222 != 1 -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -error: 2222 != 1 -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: cat stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1431: $PREPARSER ./calc input -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1431: $PREPARSER ./calc input -stderr: -syntax error -syntax error -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: $PREPARSER ./calc input stderr: -syntax error -syntax error -syntax error -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +1.7: syntax error +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./calc.at:1426: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -./calc.at:1431: cat stderr -input: +stderr: +./types.at:139: $PREPARSER ./test input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -124696,64 +125222,16 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 - | 1 + 2 * 3 + !+ ++ ./calc.at:1426: $PREPARSER ./calc input -./calc.at:1431: $PREPARSER ./calc input stderr: stderr: -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error +======== Testing with C++ standard flags: '' stderr: ./calc.at:1426: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -stdout: -input: -./calc.at:1433: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - - | 1 2 -./calc.at:1426: $PREPARSER ./calc input -stderr: -stderr: -syntax error -./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1433: $PREPARSER ./calc input -stderr: -stderr: -input: -syntax error - | 1 + 2 * 3 + !- ++ -./calc.at:1431: $PREPARSER ./calc input -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1433: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1426: "$PERL" -pi -e 'use strict; +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124763,160 +125241,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS input: | 1 2 -./calc.at:1433: $PREPARSER ./calc input -stderr: -1.3: syntax error -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: cat stderr -stderr: -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.3: syntax error -input: -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1//2 -./calc.at:1431: cat stderr ./calc.at:1426: $PREPARSER ./calc input -stderr: -input: -syntax error -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1433: cat stderr - | (#) + (#) = 2222 -./calc.at:1431: $PREPARSER ./calc input stderr: -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1//2 -stderr: + | + | +1 ./calc.at:1433: $PREPARSER ./calc input -syntax error: invalid character: '#' -syntax error: invalid character: '#' -stderr: -1.3: syntax error -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.3: syntax error -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1426: cat stderr -./calc.at:1431: cat stderr -input: -./calc.at:1433: cat stderr - | error -./calc.at:1426: $PREPARSER ./calc input -input: stderr: syntax error ./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + #) = 1111 -./calc.at:1431: $PREPARSER ./calc input - | error -stderr: -./calc.at:1433: $PREPARSER ./calc input -syntax error -stderr: -stderr: -1.1: syntax error -syntax error: invalid character: '#' -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error ./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' -1.1: syntax error -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1426: cat stderr -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 = 2 = 3 -./calc.at:1426: $PREPARSER ./calc input -stderr: -./calc.at:1433: cat stderr -stderr: stdout: -./calc.at:1431: cat stderr -./calc.at:1432: "$PERL" -ne ' +./calc.at:1434: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. 
@@ -124927,17 +125269,12 @@ || /\t/ )' calc.cc +stderr: syntax error -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: input: - | 1 = 2 = 3 -./calc.at:1433: $PREPARSER ./calc input - | (# + 1) = 1111 -input: -syntax error -./calc.at:1431: $PREPARSER ./calc input +2.1: syntax error +./calc.at:1441: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -124951,20 +125288,7 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -stderr: -./calc.at:1432: $PREPARSER ./calc input -stderr: -1.7: syntax error -syntax error: invalid character: '#' -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.7: syntax error -stderr: -syntax error: invalid character: '#' +./calc.at:1434: $PREPARSER ./calc input ./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -124975,7 +125299,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -124986,98 +125309,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1426: cat stderr -input: - | 1 2 -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1433: cat stderr -1.3: syntax error -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | - | +1 -./calc.at:1426: $PREPARSER ./calc input input: + | 1 2 +./calc.at:1434: $PREPARSER ./calc input +./calc.at:1433: $PREPARSER ./calc /dev/null +./calc.at:1426: cat stderr stderr: - | - | +1 -./calc.at:1433: $PREPARSER ./calc input -syntax error stderr: -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.3: syntax error -stderr: -./calc.at:1431: cat stderr -stderr: -syntax error -2.1: syntax error -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: - | (1 + # + 1) = 1111 -./calc.at:1431: $PREPARSER ./calc input -2.1: syntax error -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error: invalid character: '#' -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1432: cat stderr -stderr: -syntax error: invalid character: '#' +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1426: cat stderr | 1//2 -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1433: cat stderr +./calc.at:1426: $PREPARSER ./calc input stderr: -./calc.at:1426: $PREPARSER ./calc /dev/null 1.3: syntax error -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: $PREPARSER ./calc /dev/null -stderr: stderr: syntax error -./calc.at:1431: "$PERL" -pi -e 'use strict; +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125087,26 +125343,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.3: syntax error -1.1: syntax error ./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: syntax error stderr: 1.1: syntax error -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: cat stderr +./calc.at:1434: cat stderr ./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -125118,6 +125359,12 @@ }eg ' expout || exit 77 input: + | 1//2 +./calc.at:1434: $PREPARSER ./calc input +stderr: +stdout: +./types.at:139: ./check +stderr: ./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -125128,251 +125375,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1432: cat stderr - | (1 + 1) / (1 - 1) -./calc.at:1431: $PREPARSER ./calc input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +1.3: syntax error +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -error: null divisor -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error +./calc.at:1426: cat stderr ./calc.at:1433: cat stderr input: -stderr: - | error -./calc.at:1432: $PREPARSER ./calc input -error: null divisor -stderr: -input: -./calc.at:1426: cat stderr -1.1: syntax error -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1433: $PREPARSER ./calc input -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -./calc.at:1426: $PREPARSER ./calc input -1.1: syntax error -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -stderr: -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 1.2: syntax error 1.18: syntax error 1.23: syntax error 1.41: syntax error 1.1-46: error: 4444 != 1 -./calc.at:1431: cat stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1432: cat stderr -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 = 2 = 3 -523. 
calc.at:1431: ok -./calc.at:1432: $PREPARSER ./calc input -stderr: -./calc.at:1433: cat stderr -./calc.at:1426: cat stderr -1.7: syntax error -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.7: syntax error -input: -input: - | (!!) + (1 2) = 1 - | (!!) + (1 2) = 1 -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -syntax error -error: 2222 != 1 -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error -error: 2222 != 1 -1.11: syntax error -1.1-16: error: 2222 != 1 - -./calc.at:1432: cat stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | - | +1 -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1432: $PREPARSER ./calc input -stderr: -2.1: syntax error -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: cat stderr -stderr: -2.1: syntax error -./calc.at:1426: cat stderr -input: -input: - | (- *) + (1 2) = 1 -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (- *) + (1 2) = 1 -./calc.at:1433: $PREPARSER ./calc input -stderr: -stderr: -syntax error -syntax error -error: 2222 != 1 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -syntax error -syntax error -error: 2222 != 1 -./calc.at:1432: cat stderr -./calc.at:1432: $PREPARSER ./calc /dev/null -530. calc.at:1440: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} %verbose ... -./calc.at:1440: mv calc.y.tmp calc.y - -stderr: -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -1.1: syntax error -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: cat stderr -stderr: -1.1: syntax error -input: -./calc.at:1426: cat stderr - | (* *) + (*) + (*) -./calc.at:1433: $PREPARSER ./calc input input: -stderr: - | (* *) + (*) + (*) -1.2: syntax error -1.10: syntax error -1.16: syntax error ./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error ./calc.at:1426: $PREPARSER ./calc input -./calc.at:1432: "$PERL" -pi -e 'use strict; +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125383,249 +125406,20 @@ }eg ' expout || exit 77 stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -stderr: -syntax error -syntax error syntax error ./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error -syntax error -syntax error -./calc.at:1432: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1432: $PREPARSER ./calc input -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: cat stderr stderr: 1.2: syntax error 1.18: syntax error 1.23: syntax error 1.41: syntax error 1.1-46: error: 4444 != 1 -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1433: cat stderr -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1426: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -stderr: -./calc.at:1433: $PREPARSER ./calc input -stdout: -./calc.at:1434: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -./calc.at:1440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -input: -stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: cat stderr -stderr: -stderr: -./calc.at:1433: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1434: $PREPARSER ./calc input -stderr: -input: -./calc.at:1426: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -input: -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -./calc.at:1432: $PREPARSER ./calc input - | 1 + 2 * 3 + !- ++ -./calc.at:1433: $PREPARSER ./calc input -input: -stderr: -stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1426: $PREPARSER ./calc input -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 - | 1 2 -./calc.at:1434: $PREPARSER ./calc input -stderr: -1.3: syntax error -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.3: syntax error -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1432: cat stderr -./calc.at:1433: cat stderr -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1434: cat stderr - | (- *) + (1 2) = 1 -input: -./calc.at:1432: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1433: $PREPARSER ./calc input input: stderr: -./calc.at:1426: cat stderr - | 1//2 +syntax error + | error ./calc.at:1434: $PREPARSER ./calc input -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.3: syntax error -stderr: -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -input: -stderr: - | (#) + (#) = 2222 -./calc.at:1426: $PREPARSER ./calc input -1.3: syntax error -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' ./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -125636,8 +125430,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1434: cat stderr -./calc.at:1432: cat stderr ./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -125648,114 +125440,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1433: cat stderr -input: -input: - | (* *) + (*) + (*) -./calc.at:1432: $PREPARSER ./calc input - | error -stderr: -./calc.at:1434: $PREPARSER ./calc input -input: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.1: syntax error - | (1 + #) = 1111 ./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: cat stderr -./calc.at:1433: $PREPARSER ./calc input -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS 1.1: syntax error -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | (1 + #) = 1111 -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.6: syntax error: invalid character: '#' -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error: invalid character: '#' -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1432: cat stderr -./calc.at:1434: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1432: $PREPARSER ./calc input ./calc.at:1433: cat stderr input: - | 1 = 2 = 3 -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1434: $PREPARSER ./calc input -stderr: -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.7: syntax error -input: -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -stderr: -./calc.at:1433: $PREPARSER ./calc input -stderr: -1.7: syntax error ./calc.at:1426: cat stderr -./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (# + 1) = 1111 -stderr: -input: -1.2: syntax error: invalid character: '#' + | (!!) + (1 2) = 1 ./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -125766,249 +125460,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1426: $PREPARSER ./calc input - | 1 + 2 * 3 + !- ++ -./calc.at:1432: $PREPARSER ./calc input -stderr: -stderr: -syntax error: invalid character: '#' -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -syntax error: invalid character: '#' -./calc.at:1434: cat stderr -input: - | - | +1 -./calc.at:1434: $PREPARSER ./calc input -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1433: cat stderr -stderr: -2.1: syntax error -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -2.1: syntax error -input: - | (1 + # + 1) = 1111 ./calc.at:1433: $PREPARSER ./calc input -./calc.at:1432: cat stderr -stderr: -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.6: syntax error: invalid character: '#' -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: cat stderr -stderr: input: - | (#) + (#) = 2222 -./calc.at:1432: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -input: -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | (1 + # + 1) = 1111 -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: cat stderr + | 1 = 2 = 3 ./calc.at:1426: $PREPARSER ./calc input stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1434: $PREPARSER ./calc /dev/null -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -1.1: syntax error -syntax error: invalid character: '#' -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1433: cat stderr -1.1: syntax error -./calc.at:1432: cat stderr -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -./calc.at:1433: $PREPARSER ./calc input -input: -stderr: - | (1 + #) = 1111 -1.11-17: error: null divisor -./calc.at:1432: $PREPARSER ./calc input +1.11: syntax error +1.1-16: error: 2222 != 1 ./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: cat stderr -./calc.at:1434: cat stderr -stderr: -stderr: -input: - | (1 + 1) / (1 - 1) -./calc.at:1426: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11-17: error: null divisor -input: -1.6: syntax error: invalid character: '#' - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -./calc.at:1434: $PREPARSER ./calc input -error: null divisor -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -stderr: -error: null divisor -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1433: cat stderr -./calc.at:1432: cat stderr -525. calc.at:1433: ok -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (# + 1) = 1111 -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1434: cat stderr -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - -1.2: syntax error: invalid character: '#' -input: -./calc.at:1426: cat stderr - | (!!) + (1 2) = 1 -./calc.at:1434: $PREPARSER ./calc input stderr: 1.11: syntax error 1.1-16: error: 2222 != 1 -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: "$PERL" -pi -e 'use strict; +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -126018,4243 +125481,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1433: cat stderr stderr: -522. 
calc.at:1426: 1.11: syntax error -1.1-16: error: 2222 != 1 - ok -./calc.at:1432: cat stderr -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - input: - | (1 + # + 1) = 1111 -./calc.at:1432: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +./calc.at:1433: $PREPARSER ./calc input +syntax error +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1434: cat stderr stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (- *) + (1 2) = 1 -./calc.at:1434: $PREPARSER ./calc input -./calc.at:1432: cat stderr +syntax error stderr: 1.4: syntax error 1.12: syntax error 1.1-17: error: 2222 != 1 -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: stderr: 1.4: syntax error 1.12: syntax error 1.1-17: error: 2222 != 1 - | (1 + 1) / (1 - 1) -./calc.at:1432: $PREPARSER ./calc input -531. calc.at:1441: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} api.token.prefix={TOK_} %verbose ... -stderr: -1.11-17: error: null divisor -./calc.at:1441: mv calc.y.tmp calc.y - -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1441: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: -1.11-17: error: null divisor -./calc.at:1434: cat stderr -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -532. calc.at:1443: testing Calculator C++ %header %locations parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1443: mv calc.y.tmp calc.y - - | (* *) + (*) + (*) -./calc.at:1434: $PREPARSER ./calc input -./calc.at:1432: cat stderr -./calc.at:1443: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -524. 
calc.at:1432: stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error - ok -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1434: cat stderr - -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1434: $PREPARSER ./calc input -stderr: -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1434: $PREPARSER ./calc input -./calc.at:1441: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -stderr: -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1443: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -stderr: -stdout: -./calc.at:1438: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1434: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1434: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -533. calc.at:1445: testing Calculator C++ parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
-./calc.at:1445: mv calc.y.tmp calc.y - -./calc.at:1434: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Stack now 0 6 8 20 29 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 20 29 21 2 -Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering 
state 1 -Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 20 29 21 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Stack now 0 6 8 20 29 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Stack now 0 6 8 20 29 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Stack now 0 6 2 10 23 -Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Stack now 0 6 2 10 23 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' 
(4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 -Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.10: 
1) --> $$ = nterm exp (5.10: 1) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) 
-Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (10.1: 1) -Shifting token number 
(10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Stack now 0 6 8 19 4 -Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) -Entering state 1 -Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Stack now 0 6 8 19 4 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Stack now 0 6 8 19 4 12 19 -Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Stack now 0 6 8 19 4 12 19 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 19 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 
(line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Stack now 0 6 8 23 32 23 -Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 -Stack now 0 6 8 23 32 23 32 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Stack now 0 6 8 23 32 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Stack now 0 6 4 12 23 -Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Stack now 0 6 4 12 23 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Stack 
now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 
92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Stack now 0 6 8 20 29 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 20 29 21 2 -Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 20 29 21 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Stack now 0 6 8 20 29 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Stack now 0 6 8 20 29 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 18 2 1 
-Reducing stack by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Stack now 0 6 2 10 23 -Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Stack now 0 6 2 10 23 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 
-Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 -Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next 
token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp 
(9.5: 2) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Stack now 0 6 8 19 4 -Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) -Entering state 1 -Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Stack now 0 6 8 19 4 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Stack now 0 6 8 19 4 12 19 -Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Stack now 0 6 8 19 4 12 19 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp 
(10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 19 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Stack now 0 6 8 23 32 23 -Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 -Stack now 0 6 8 23 32 23 32 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Stack now 0 6 8 23 32 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 
103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Stack now 0 6 4 12 23 -Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Stack now 0 6 4 12 23 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 -Stack now 
0 6 8 18 27 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1438: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | (1 + #) = 1111 -./calc.at:1434: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -1.6: syntax error: invalid character: '#' - | 1 2 -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1434: cat stderr -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (# + 1) = 1111 -./calc.at:1434: $PREPARSER ./calc input -./calc.at:1438: cat stderr -stderr: -1.2: syntax error: invalid character: '#' -input: -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1438: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1438: cat stderr -./calc.at:1434: cat stderr -input: - | error -./calc.at:1438: $PREPARSER ./calc input -input: -stderr: -./calc.at:1445: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1434: $PREPARSER ./calc input -stderr: -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1438: cat stderr -./calc.at:1434: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1438: $PREPARSER ./calc input -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./calc.at:1434: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack 
now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -stderr: -1.11-17: error: null divisor -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.11-17: error: null divisor -./calc.at:1438: cat stderr -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | - | +1 -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1434: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -526. calc.at:1434: ok -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1438: cat stderr - -./calc.at:1438: $PREPARSER ./calc /dev/null -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1438: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1438: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm 
exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 
-Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -534. calc.at:1446: testing Calculator C++ %header %locations parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1446: mv calc.y.tmp calc.y - -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1446: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1438: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1438: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' 
(1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ 
/\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1438: cat stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1438: $PREPARSER ./calc input -stderr: -./calc.at:1446: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp 
(1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 
18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1438: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1438: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: "$PERL" -pi -e 'use strict; +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -130262,383 +125509,11 @@ ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1438: cat stderr -stdout: -./calc.at:1437: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1438: $PREPARSER ./calc input -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1437: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1437: $EGREP -c -v 'Return for a new token:|LAC:' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1438: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | 1 2 -./calc.at:1437: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1438: $PREPARSER ./calc input -stderr: -1.3: syntax error, unexpected number -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.3: syntax error, unexpected number -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1437: "$PERL" -pi -e 'use strict; + }eg +' expout || exit 77 + | 1 = 2 = 3 +./calc.at:1434: $PREPARSER ./calc input +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -130648,7 +125523,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1438: "$PERL" -pi -e 'use strict; +stderr: +1.7: syntax error +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: cat stderr +./calc.at:1426: cat stderr +input: +input: + | + | +1 +./calc.at:1426: $PREPARSER ./calc input +stderr: +1.7: syntax error + | (* *) + (*) + (*) +stderr: +./calc.at:1433: $PREPARSER ./calc input +syntax error +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -130658,20 +125557,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1437: cat stderr -./calc.at:1438: cat stderr -input: - | 1//2 -./calc.at:1437: $PREPARSER ./calc input -input: -stderr: - | (#) + (#) = 2222 -./calc.at:1438: $PREPARSER ./calc input -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1437: "$PERL" -pi -e 'use strict; +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -130681,258 +125571,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1426: cat stderr +./calc.at:1434: cat stderr +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | + | +1 +./calc.at:1434: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge 
mismatch for summaries/d' stderr +2.1: syntax error +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: $PREPARSER ./calc /dev/null +stderr: +syntax error +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1433: cat stderr +2.1: syntax error stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 
16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1437: cat stderr input: -./calc.at:1438: "$PERL" -pi -e 'use strict; +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -130942,19 +125609,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | error -./calc.at:1437: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1433: $PREPARSER ./calc input +syntax error stderr: -1.1: syntax error, unexpected invalid token -./calc.at:1438: cat stderr -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: cat stderr +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -1.1: syntax error, unexpected invalid token +./calc.at:1434: $PREPARSER ./calc /dev/null +./calc.at:1433: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +1.1: syntax error +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: cat stderr +stderr: +1.1: syntax error input: - | (1 + #) = 1111 -./calc.at:1438: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1426: $PREPARSER ./calc input +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1433: $PREPARSER ./calc input stderr: -./calc.at:1437: "$PERL" -pi -e 'use strict; +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -130964,211 +125651,34 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 
1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: cat stderr +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) 
--> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: cat stderr +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +stderr: +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: - | 1 = 2 = 3 -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1438: "$PERL" -pi -e 'use strict; +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -131178,16 +125688,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1434: $PREPARSER ./calc input stderr: -1.7: syntax error, unexpected '=' -./calc.at:1438: cat stderr -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: cat stderr +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1426: cat stderr +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.7: syntax error, unexpected '=' input: - | (# + 1) = 1111 -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1437: "$PERL" -pi -e 'use strict; + | (#) + (#) = 2222 +./calc.at:1433: $PREPARSER ./calc input +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -131198,207 +125723,13 @@ }eg ' expout || exit 77 stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering 
state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token 1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' 
(1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +1.8: syntax error: invalid character: '#' input: - | - | +1 -./calc.at:1437: $PREPARSER ./calc input -stderr: -./calc.at:1438: "$PERL" -pi -e 'use strict; + | (!!) + (1 2) = 1 +./calc.at:1426: $PREPARSER ./calc input +./calc.at:1434: cat stderr +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -131408,12 +125739,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -2.1: syntax error, unexpected '+' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (!!) + (1 2) = 1 +./calc.at:1434: $PREPARSER ./calc input stderr: -2.1: syntax error, unexpected '+' -./calc.at:1438: cat stderr -./calc.at:1437: "$PERL" -pi -e 'use strict; +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1433: cat stderr +1.11: syntax error +1.1-16: error: 2222 != 1 +syntax error +error: 2222 != 1 +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -131423,250 +125763,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: input: - | (1 + # + 1) = 1111 -./calc.at:1438: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1434: cat stderr stderr: -./calc.at:1437: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token 1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = 
token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: $PREPARSER ./calc /dev/null -stderr: +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +error: 2222 != 1 stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token +input: + | (- *) + (1 2) = 1 +./calc.at:1434: $PREPARSER ./calc input 1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' 
(1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.1: syntax error, unexpected end of input -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error, unexpected end of input -./calc.at:1438: "$PERL" -pi -e 'use strict; +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -131676,7 +125794,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1437: "$PERL" -pi -e 'use strict; +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -131686,171 +125808,50 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1438: cat stderr -./calc.at:1437: cat stderr +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1426: cat stderr +./calc.at:1434: cat stderr +./calc.at:1433: cat stderr input: input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1437: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) -./calc.at:1438: $PREPARSER ./calc input + | (* *) + (*) + (*) + | (- *) + (1 2) = 1 +./calc.at:1426: $PREPARSER ./calc input +./calc.at:1434: $PREPARSER ./calc input stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.1-46: error: 4444 != 1 -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +error: 2222 != 1 +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (# + 1) = 1111 +./calc.at:1433: $PREPARSER ./calc input stderr: stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = 
nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: "$PERL" -pi -e 'use strict; +syntax error +syntax error +error: 2222 != 1 +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -131861,152 +125862,9 @@ }eg ' expout || exit 77 stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): 
- $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1437: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1438: "$PERL" -pi -e 'use strict; +1.2: syntax error: invalid character: '#' +./calc.at:1434: cat stderr +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132016,16 +125874,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1438: cat stderr -stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -529. calc.at:1438: ok -./calc.at:1437: "$PERL" -pi -e 'use strict; +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132035,21 +125884,42 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1437: cat stderr input: - | (- *) + (1 2) = 1 - -./calc.at:1437: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1434: $PREPARSER ./calc input stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: cat stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1437: "$PERL" -pi -e 'use strict; +./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +./calc.at:1426: cat stderr +1.6: syntax error: invalid character: '#' +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +stderr: +./types.at:139: $PREPARSER ./test +input: +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1434: $PREPARSER ./calc input +input: +1.6: syntax error: invalid character: '#' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +======== Testing with C++ standard flags: '' + | (* *) + (*) + (*) +stderr: +./calc.at:1426: $PREPARSER ./calc input +stdout: +./types.at:139: $PREPARSER ./test +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132059,23 +125929,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1437: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1437: $PREPARSER ./calc input +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +syntax error +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -535. calc.at:1448: testing Calculator C++ %header %locations api.location.file=none ... -./calc.at:1448: mv calc.y.tmp calc.y - -./calc.at:1437: "$PERL" -pi -e 'use strict; +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132085,22 +125946,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1448: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1437: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1437: $PREPARSER ./calc input -stderr: -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1437: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1433: cat stderr +syntax error +syntax error +syntax error +./calc.at:1434: cat stderr +442. 
types.at:139: ok input: - | 1 + 2 * 3 + !- ++ -./calc.at:1437: $PREPARSER ./calc input -stderr: -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./calc.at:1433: $PREPARSER ./calc input +input: + | (#) + (#) = 2222 stderr: -./calc.at:1437: "$PERL" -pi -e 'use strict; +./calc.at:1434: $PREPARSER ./calc input +1.11-17: error: null divisor +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132110,18 +125972,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1437: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1437: $PREPARSER ./calc input +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +1.11-17: error: null divisor 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: cat stderr stderr: 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' -./calc.at:1437: "$PERL" -pi -e 'use strict; +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132131,17 +125993,46 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1448: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1437: cat stderr +input: + + | 1 + 2 * 3 + !+ ++ +./calc.at:1426: $PREPARSER ./calc input +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1433: cat stderr +./calc.at:1434: cat stderr +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: | (1 + #) = 1111 -./calc.at:1437: $PREPARSER ./calc input +./calc.at:1434: $PREPARSER ./calc input +stderr: +./calc.at:1426: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: 1.6: syntax error: invalid character: '#' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +525. calc.at:1433: ok +stderr: +input: stderr: 1.6: syntax error: invalid character: '#' -./calc.at:1437: "$PERL" -pi -e 'use strict; +stderr: + | 1 + 2 * 3 + !- ++ +stdout: +./calc.at:1426: $PREPARSER ./calc input +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132151,16 +126042,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1437: cat stderr +stderr: +stderr: +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +stderr: +./calc.at:1434: cat stderr +441. 
types.at:139: ok input: | (# + 1) = 1111 -./calc.at:1437: $PREPARSER ./calc input +./calc.at:1434: $PREPARSER ./calc input stderr: 1.2: syntax error: invalid character: '#' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +532. calc.at:1443: testing Calculator C++ %header %locations parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... stderr: + +./calc.at:1443: mv calc.y.tmp calc.y + 1.2: syntax error: invalid character: '#' -./calc.at:1437: "$PERL" -pi -e 'use strict; +./calc.at:1426: cat stderr +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132170,16 +126083,48 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1437: cat stderr input: + | (#) + (#) = 2222 +./calc.at:1426: $PREPARSER ./calc input +./calc.at:1443: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +533. calc.at:1445: testing Calculator C++ parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1445: mv calc.y.tmp calc.y + +./calc.at:1434: cat stderr +./calc.at:1445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +534. calc.at:1446: testing Calculator C++ %header %locations parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +syntax error: invalid character: '#' +syntax error: invalid character: '#' +input: +./calc.at:1446: mv calc.y.tmp calc.y + | (1 + # + 1) = 1111 -./calc.at:1437: $PREPARSER ./calc input +./calc.at:1434: $PREPARSER ./calc input +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: 1.6: syntax error: invalid character: '#' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.6: syntax error: invalid character: '#' -./calc.at:1437: "$PERL" -pi -e 'use strict; +./calc.at:1443: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1446: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1426: cat stderr +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132189,16 +126134,42 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1437: cat stderr +./calc.at:1445: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +input: +./calc.at:1434: cat stderr + | (1 + #) = 1111 +./calc.at:1426: $PREPARSER ./calc input +stderr: +stderr: +stdout: +syntax error: invalid character: '#' +./types.at:139: $PREPARSER ./test +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: | (1 + 1) / (1 - 1) -./calc.at:1437: $PREPARSER ./calc input +./calc.at:1434: $PREPARSER ./calc input stderr: -1.11-17: error: null divisor -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error: invalid character: '#' +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.11-17: error: null divisor -./calc.at:1437: "$PERL" -pi -e 'use strict; +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +440. types.at:139: stderr: + ok +1.11-17: error: null divisor +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132208,17 +126179,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1437: cat stderr -528. calc.at:1437: ok - -536. calc.at:1449: testing Calculator C++ %header %locations api.location.file="my-location.hh" ... -./calc.at:1449: mv calc.y.tmp calc.y -./calc.at:1449: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1449: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1426: cat stderr stderr: stdout: -./calc.at:1435: "$PERL" -ne ' +./calc.at:1434: cat stderr +./calc.at:1437: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -132227,9 +126193,15 @@ || /\s$/ # No tabs. 
|| /\t/ - )' calc.cc calc.hh + )' calc.cc input: + | (# + 1) = 1111 +./calc.at:1426: $PREPARSER ./calc input +stderr: +input: +syntax error: invalid character: '#' +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -132243,20 +126215,40 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1437: $PREPARSER ./calc input stderr: -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' stderr: -./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1437: $EGREP -c -v 'Return for a new token:|LAC:' stderr +526. calc.at:1434: input: + ok +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 | 1 2 -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1437: $PREPARSER ./calc input stderr: 1.3: syntax error, unexpected number -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: cat stderr stderr: 1.3: syntax error, unexpected number -./calc.at:1435: "$PERL" -pi -e 'use strict; +input: + | (1 + # + 1) = 1111 +./calc.at:1426: $PREPARSER ./calc input + +stderr: +syntax error: invalid character: '#' +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132266,16 +126258,40 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: cat stderr +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +535. calc.at:1448: testing Calculator C++ %header %locations api.location.file=none ... +./calc.at:1448: mv calc.y.tmp calc.y + +./calc.at:1437: cat stderr +./calc.at:1448: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y input: +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 | 1//2 -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1437: $PREPARSER ./calc input stderr: 1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: cat stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1426: $PREPARSER ./calc input stderr: 1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-./calc.at:1435: "$PERL" -pi -e 'use strict; +stderr: +error: null divisor +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132285,16 +126301,33 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: cat stderr +stderr: +error: null divisor +./calc.at:1437: cat stderr input: | error -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1437: $PREPARSER ./calc input stderr: 1.1: syntax error, unexpected invalid token -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.1: syntax error, unexpected invalid token -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1437: cat stderr +536. calc.at:1449: testing Calculator C++ %header %locations api.location.file="my-location.hh" ... +./calc.at:1449: mv calc.y.tmp calc.y + +./calc.at:1449: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132304,16 +126337,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: cat stderr input: | 1 = 2 = 3 -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1448: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS stderr: 1.7: syntax error, unexpected '=' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: cat stderr stderr: 1.7: syntax error, unexpected '=' -./calc.at:1435: "$PERL" -pi -e 'use strict; +522. calc.at:1426: ok +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132323,17 +126358,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: cat stderr +./calc.at:1437: cat stderr +./calc.at:1449: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS + input: | | +1 -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1437: $PREPARSER ./calc input stderr: 2.1: syntax error, unexpected '+' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 2.1: syntax error, unexpected '+' -./calc.at:1435: "$PERL" -pi -e 'use strict; +537. calc.at:1451: testing Calculator C++ %no-lines %header %locations api.location.file="my-location.hh" ... 
+./calc.at:1451: mv calc.y.tmp calc.y + +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132343,14 +126383,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: cat stderr -./calc.at:1435: $PREPARSER ./calc /dev/null +./calc.at:1437: cat stderr +./calc.at:1437: $PREPARSER ./calc /dev/null +./calc.at:1451: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: 1.1: syntax error, unexpected end of input -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.1: syntax error, unexpected end of input -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132360,24 +126401,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: cat stderr +./calc.at:1437: cat stderr input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1437: $PREPARSER ./calc input stderr: 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.1-46: error: 4444 != 1 -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.1-46: error: 4444 != 1 -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132387,18 +126428,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: cat stderr +./calc.at:1451: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1437: cat stderr input: | (!!) + (1 2) = 1 -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1437: $PREPARSER ./calc input stderr: 1.11: syntax error, unexpected number 1.1-16: error: 2222 != 1 -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.11: syntax error, unexpected number 1.1-16: error: 2222 != 1 -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132408,20 +126450,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: cat stderr +./calc.at:1437: cat stderr input: | (- *) + (1 2) = 1 -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1437: $PREPARSER ./calc input stderr: 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
1.12: syntax error, unexpected number 1.1-17: error: 2222 != 1 -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.12: syntax error, unexpected number 1.1-17: error: 2222 != 1 -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132431,20 +126473,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: cat stderr +stderr: +stdout: +./calc.at:1437: cat stderr +./types.at:139: $PREPARSER ./test +stderr: input: | (* *) + (*) + (*) -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1437: $PREPARSER ./calc input stderr: 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132454,21 +126500,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: cat stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: cat stderr input: | 1 + 2 * 3 + !+ ++ -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1437: $PREPARSER ./calc input +443. types.at:139: ok stderr: -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr + +./calc.at:1437: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: | 1 + 2 * 3 + !- ++ -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1437: $PREPARSER ./calc input stderr: -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +538. calc.at:1453: testing Calculator C++ %locations parse.lac=full parse.error=verbose ... 
+./calc.at:1453: mv calc.y.tmp calc.y + +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132478,18 +126531,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: cat stderr +./calc.at:1437: cat stderr input: | (#) + (#) = 2222 -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1437: $PREPARSER ./calc input stderr: 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1453: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./types.at:139: $PREPARSER ./test stderr: 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' -./calc.at:1435: "$PERL" -pi -e 'use strict; +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132499,16 +126558,43 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: cat stderr +444. types.at:139: ok +./calc.at:1437: cat stderr input: | (1 + #) = 1111 -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1437: $PREPARSER ./calc input stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + stderr: -stdout: 1.6: syntax error: invalid character: '#' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: "$PERL" -ne ' +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1437: cat stderr +input: +539. calc.at:1454: testing Calculator C++ %locations parse.lac=full parse.error=detailed ... +./calc.at:1454: mv calc.y.tmp calc.y + +./calc.at:1454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y + | (# + 1) = 1111 +./calc.at:1437: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +stderr: +stdout: +./calc.at:1435: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -132517,10 +126603,18 @@ || /\s$/ # No tabs. || /\t/ - )' calc.cc + )' calc.cc calc.hh -stderr: -1.6: syntax error: invalid character: '#' +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -132535,7 +126629,43 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1454: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1435: $PREPARSER ./calc input +stderr: +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: cat stderr +stderr: +./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1437: $PREPARSER ./calc input +input: +stderr: + | 1 2 +1.6: syntax error: invalid character: '#' +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: $PREPARSER ./calc input +stderr: +stderr: +1.3: syntax error, unexpected number +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.6: syntax error: invalid character: '#' +stdout: +./calc.at:1438: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +stderr: +1.3: syntax error, unexpected number +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132545,16 +126675,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1440: $PREPARSER ./calc input -./calc.at:1435: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1435: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error: invalid character: '#' +stdout: +input: ./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -132565,7 +126688,57 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1440: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1437: cat stderr +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 ./calc.at:1435: cat stderr +./calc.at:1440: $PREPARSER ./calc input +input: + | (1 + 1) / (1 - 1) +./calc.at:1437: $PREPARSER ./calc input +input: +stderr: +1.11-17: error: null divisor +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +./calc.at:1435: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -133584,10 +127757,8 @@ Stack now 0 6 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 @@ -134606,1701 +128777,12 @@ Stack now 0 6 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -stderr: -./calc.at:1440: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.6: syntax error: invalid character: '#' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' -input: - | 1 2 -./calc.at:1440: $PREPARSER ./calc input -./calc.at:1435: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1435: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -input: - | (1 + 1) / (1 - 1) -./calc.at:1435: $PREPARSER ./calc input -stderr: -1.11-17: error: null divisor -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.11-17: error: null divisor -./calc.at:1440: cat stderr -./calc.at:1435: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1//2 -./calc.at:1440: $PREPARSER ./calc input -./calc.at:1435: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -527. calc.at:1435: ok -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1440: cat stderr - -input: - | error -./calc.at:1440: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1440: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1440: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -537. calc.at:1451: testing Calculator C++ %no-lines %header %locations api.location.file="my-location.hh" ... 
-./calc.at:1451: mv calc.y.tmp calc.y - -./calc.at:1451: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1440: cat stderr -input: - | - | +1 -./calc.at:1440: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1440: cat stderr -./calc.at:1440: $PREPARSER ./calc /dev/null -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1440: cat stderr -input: -./calc.at:1451: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1440: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1440: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1440: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' 
(1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ 
/\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stdout: -./calc.at:1440: cat stderr -./calc.at:1441: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -input: - | (- *) + (1 2) = 1 -./calc.at:1440: $PREPARSER ./calc input -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1441: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) 
-Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack 
now 0 8 20 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1440: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1440: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token Next token is token number (1.1: 1) Shifting token number (1.1: 1) Entering state 1 @@ -137250,8 +129732,7 @@ Stack now 0 6 8 23 Reading a token Next token is token number (13.7: 3) -Shifting token numstderr: -ber (13.7: 3) +Shifting token number (13.7: 3) Entering state 1 Stack now 0 6 8 23 1 Reducing stack by rule 5 (line 79): @@ -137273,367 +129754,54 @@ Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) -Entering state 16 -Stack now 0 6 16 
-Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./calc.at:1445: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 24 -Stack now 0 8 24 +Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) 
-Cleanup: popping nterm input (1.1-2.0: ) +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1438: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +1.11-17: error: null divisor +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -138651,23 +130819,13 @@ Stack now 0 6 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1441: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 +./calc.at:1440: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: -./calc.at:1440: "$PERL" -pi -e 'use strict; + | 1 2 +./calc.at:1438: $PREPARSER ./calc input +stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -138677,9 +130835,63 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1445: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +input: +./calc.at:1435: cat stderr | 1 2 -./calc.at:1441: $PREPARSER ./calc input +./calc.at:1440: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -138701,8 +130913,19 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -./calc.at:1440: cat stderr -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: cat stderr +input: +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -138724,7 +130947,10 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -./calc.at:1441: "$PERL" -pi -e 'use strict; + | error +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1438: cat stderr +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -138734,1030 +130960,1371 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: input: - | 1 + 2 * 3 + !+ ++ +1.1: syntax error, unexpected invalid token +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +528. calc.at:1437: ok +./calc.at:1440: cat stderr +stderr: +1.1: syntax error, unexpected invalid token + | 1//2 +./calc.at:1438: $PREPARSER ./calc input +input: + | 1//2 +stderr: ./calc.at:1440: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1435: cat stderr +stderr: + +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 = 2 = 3 +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1435: $PREPARSER ./calc input +stderr: +./calc.at:1440: cat stderr +./calc.at:1438: cat stderr +1.7: syntax error, unexpected '=' +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | error +./calc.at:1438: $PREPARSER ./calc input +stderr: +1.7: syntax error, unexpected '=' +input: + | error +./calc.at:1440: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +stderr: +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +stderr: +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +540. calc.at:1455: testing Calculator C++ %locations parse.lac=full parse.error=detailed parse.trace ... +./calc.at:1455: mv calc.y.tmp calc.y + +./calc.at:1455: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +./calc.at:1435: cat stderr +stdout: +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1441: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + +input: +./calc.at:1440: cat stderr + | + | +1 +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1438: cat stderr +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1441: $PREPARSER ./calc input +stderr: +2.1: syntax error, unexpected '+' +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +2.1: syntax error, unexpected '+' +input: +input: + | 1 = 2 = 3 +./calc.at:1440: $PREPARSER ./calc input + | 1 = 2 = 3 +stderr: +./calc.at:1438: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' () +Next token is token '=' (1.11: ) Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 Stack now 0 8 20 29 -Next token is token '=' () +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (7) --> $$ = nterm exp (7) + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.14-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a 
token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 20 Stack now 0 6 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 Stack now 0 6 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 29 Stack now 0 6 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) Entering state 21 Stack now 0 6 8 20 29 21 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) Entering state 2 Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 Stack now 0 6 8 20 29 21 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 20 29 21 2 10 Reading a token -Next token is token '=' () +Next token is token '=' (2.12: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) Entering state 30 Stack now 0 6 8 20 29 21 30 -Next token is token '=' () +Next token is token '=' (2.12: ) Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) Entering state 29 Stack now 0 6 8 20 29 -Next token is token '=' () +Next token is token '=' (2.12: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (5) --> $$ = nterm exp (5) + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (2.16-3.0: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) Entering state 27 
Stack now 0 6 8 18 27 -Next token is token '\n' () +Next token is token '\n' (2.16-3.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) Entering state 2 Stack now 0 6 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 Stack now 0 6 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) Entering state 10 Stack now 0 6 2 10 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) Entering state 23 Stack now 0 6 2 10 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) Entering state 1 Stack now 0 6 2 10 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) Entering state 32 Stack now 0 6 2 10 23 32 Reading a token -Next token is token '=' () +Next token is token '=' (4.6: ) Reducing stack by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) Entering state 10 Stack now 0 6 2 10 -Next token is token '=' () +Next token is token '=' (4.6: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) Entering state 2 Stack now 0 6 8 
18 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (4.10-5.0: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' () +Next token is token '\n' (4.10-5.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Stack now 0 6 4 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) Entering state 2 Stack now 0 6 4 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) Entering state 1 Stack now 0 6 4 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 Reading a token -Next token is token ')' () +Next token is token ')' (5.4: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 Stack now 0 6 4 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) Entering state 26 Stack now 0 6 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) Entering state 23 Stack now 0 6 8 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) Entering state 1 Stack now 0 6 8 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) Entering state 32 Stack 
now 0 6 8 23 32 Reading a token -Next token is token '=' () +Next token is token '=' (5.8: ) Reducing stack by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (5.11-6.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) Entering state 2 Stack now 0 6 2 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) Entering state 2 Stack now 0 6 2 2 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) Entering state 2 Stack now 0 6 2 2 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) Entering state 1 Stack now 0 6 2 2 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 Reading a token -Next token is token '=' () +Next token is token '=' (7.6: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) Entering state 10 
Stack now 0 6 2 2 10 -Next token is token '=' () +Next token is token '=' (7.6: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) Entering state 10 Stack now 0 6 2 10 -Next token is token '=' () +Next token is token '=' (7.6: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (7.10-8.0: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' () +Next token is token '\n' (7.10-8.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token 
'-' (9.3: ) +Shifting token '-' (9.3: ) Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) Entering state 1 Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '-' () +Next token is token '-' (9.7: ) Reducing stack by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) Entering state 8 Stack now 0 6 8 -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) Entering state 1 Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '=' () +Next token is token '=' (9.11: ) Reducing stack by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) Entering state 8 Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (4) --> $$ = nterm exp (4) + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (9.15-10.0: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' () +Next token is token '\n' (9.15-10.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = 
nterm input (1.1-10.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Stack now 0 6 8 19 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 Stack now 0 6 8 19 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 Stack now 0 6 8 19 4 12 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) Entering state 19 Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) Entering state 1 Stack now 0 6 8 19 4 12 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) Entering state 28 Stack now 0 6 8 19 4 12 19 28 Reading a token -Next token is token ')' () +Next token is token ')' (10.11: ) Reducing stack by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 Stack now 0 6 8 19 4 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) Entering state 26 Stack now 0 6 8 19 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '=' () +Next token is token '=' (10.13: ) Reducing stack by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (10.16-11.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) 
+ $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) Entering state 8 Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) Entering state 23 Stack now 0 6 8 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) Entering state 1 Stack now 0 6 8 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) Entering state 32 Stack now 0 6 8 23 32 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) Entering state 23 Stack now 0 6 8 23 32 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) Entering state 1 Stack now 0 6 8 23 32 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) Entering state 32 Stack now 0 6 8 23 32 23 32 Reading a token -Next token is token '=' () +Next token is token '=' (12.7: ) Reducing stack by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) Entering state 32 Stack now 0 6 8 23 32 -Next token is token '=' () +Next token is token '=' (12.7: ) Reducing stack by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () +Next 
token is token '=' (12.7: ) +Shifting token '=' (12.7: ) Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (256) --> $$ = nterm exp (256) + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (12.12-13.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Stack now 0 6 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 Stack now 0 6 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 Stack now 0 6 4 12 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) Entering state 23 Stack now 0 6 4 12 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) Entering state 1 Stack now 0 6 4 12 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) Entering state 32 Stack now 0 6 4 12 23 32 Reading a token -Next token is token ')' () +Next token is token ')' (13.5: ) Reducing stack by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 Stack now 0 6 4 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) Entering state 26 Stack now 0 6 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) Entering state 23 Stack now 0 6 8 23 Reading a token -Next token is token number (3) -Shifting 
token number (3) +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) Entering state 1 Stack now 0 6 8 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) Entering state 32 Stack now 0 6 8 23 32 Reading a token -Next token is token '=' () +Next token is token '=' (13.9: ) Reducing stack by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (64) --> $$ = nterm exp (64) + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (13.13-14.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1441: cat stderr -stderr: +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1435: cat stderr +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -139772,1095 +132339,1096 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 20 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 
21 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1435: $PREPARSER ./calc /dev/null +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 8 20 29 21 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 stderr: +1.1: syntax error, unexpected end of input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' () +Next token is token '=' (1.11: ) Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 Stack now 0 8 20 29 -Next token is token '=' () +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (7) --> $$ = nterm exp (7) + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.14-2.0: ) Reducing stack by rule 6 (line 80): - $1 
= nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 20 Stack now 0 6 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 Stack now 0 6 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 29 Stack now 0 6 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) Entering state 21 Stack now 0 6 8 20 29 21 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) Entering state 2 Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 Stack now 0 6 8 20 29 21 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 20 29 21 2 10 Reading a token -Next token is token '=' () +Next token is token '=' (2.12: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) Entering state 30 Stack now 0 6 8 20 29 21 30 -Next token is token '=' () +Next token is token '=' (2.12: ) Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) Entering state 29 Stack now 0 6 8 20 29 -Next token is token '=' () +Next token is token '=' (2.12: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token 
'-' () -Shifting token '-' () +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (5) --> $$ = nterm exp (5) + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (2.16-3.0: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' () +Next token is token '\n' (2.16-3.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) Entering state 2 Stack now 0 6 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 Stack now 0 6 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) Entering state 10 Stack now 0 6 2 10 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) Entering state 23 Stack now 0 6 2 10 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) Entering state 1 Stack now 0 6 2 10 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) Entering state 32 Stack now 0 6 2 10 23 32 Reading a token -Next token is token '=' () +Next token is token '=' (4.6: ) Reducing stack by rule 12 (line 103): - $1 = nterm exp (1) - $2 = 
token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) Entering state 10 Stack now 0 6 2 10 -Next token is token '=' () +Next token is token '=' (4.6: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (4.10-5.0: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' () +Next token is token '\n' (4.10-5.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Stack now 0 6 4 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) Entering state 2 Stack now 0 6 4 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) Entering state 1 Stack now 0 6 4 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 Reading a token -Next token is token ')' () +Next token is token ')' (5.4: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 Stack now 0 6 4 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) Entering state 26 Stack now 
0 6 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) Entering state 23 Stack now 0 6 8 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) Entering state 1 Stack now 0 6 8 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) Entering state 32 Stack now 0 6 8 23 32 Reading a token -Next token is token '=' () +Next token is token '=' (5.8: ) Reducing stack by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (5.11-6.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) Entering state 2 Stack now 0 6 2 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) Entering state 2 Stack now 0 6 2 2 Reading a token 
-Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) Entering state 2 Stack now 0 6 2 2 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) Entering state 1 Stack now 0 6 2 2 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 Reading a token -Next token is token '=' () +Next token is token '=' (7.6: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) Entering state 10 Stack now 0 6 2 2 10 -Next token is token '=' () +Next token is token '=' (7.6: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) Entering state 10 Stack now 0 6 2 10 -Next token is token '=' () +Next token is token '=' (7.6: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (7.10-8.0: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' () +Next token is token '\n' (7.10-8.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token 
'\n' () --> $$ = nterm line () + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) Entering state 1 Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '-' () +Next token is token '-' (9.7: ) Reducing stack by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) Entering state 8 Stack now 0 6 8 -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) Entering state 1 Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '=' () +Next token is token '=' (9.11: ) Reducing stack by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) Entering state 8 Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (4) --> $$ = nterm exp (4) + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' () +Next token is token '\n' (9.15-10.0: ) Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' () +Next token is token '\n' (9.15-10.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp 
(-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Stack now 0 6 8 19 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 Stack now 0 6 8 19 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 Stack now 0 6 8 19 4 12 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) Entering state 19 Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) Entering state 1 Stack now 0 6 8 19 4 12 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) Entering state 28 Stack now 0 6 8 19 4 12 19 28 Reading a token -Next token is token ')' () +Next token is token ')' (10.11: ) Reducing stack by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 Stack now 0 6 8 19 4 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) Entering state 26 Stack now 0 6 8 19 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '=' () +Next token is token '=' (10.13: ) Reducing stack by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) 
+-> $$ = nterm exp (10.1-11: 2) Entering state 8 Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (10.16-11.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) Entering state 8 Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) Entering state 23 Stack now 0 6 8 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) Entering state 1 Stack now 0 6 8 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) Entering state 32 Stack now 0 6 8 23 32 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) Entering state 23 Stack now 0 6 8 23 32 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) Entering state 1 Stack now 0 6 8 23 32 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (12.5: 3) +-> $$ = nterm exp 
(12.5: 3) Entering state 32 Stack now 0 6 8 23 32 23 32 Reading a token -Next token is token '=' () +Next token is token '=' (12.7: ) Reducing stack by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) Entering state 32 Stack now 0 6 8 23 32 -Next token is token '=' () +Next token is token '=' (12.7: ) Reducing stack by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (256) --> $$ = nterm exp (256) + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (12.12-13.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Stack now 0 6 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 Stack now 0 6 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 Stack now 0 6 4 12 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) Entering state 23 Stack now 0 6 4 12 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) Entering state 1 Stack now 0 6 4 12 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) Entering state 32 Stack now 0 6 4 12 23 32 Reading a token -Next token is token ')' () +Next token is token ')' (13.5: ) Reducing stack by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp 
(4) + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 Stack now 0 6 4 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) Entering state 26 Stack now 0 6 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) Entering state 23 Stack now 0 6 8 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) Entering state 1 Stack now 0 6 8 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) Entering state 32 Stack now 0 6 8 23 32 Reading a token -Next token is token '=' () +Next token is token '=' (13.9: ) Reducing stack by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (64) --> $$ = nterm exp (64) + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (13.13-14.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1445: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | 1//2 -stderr: +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1441: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1435: 
sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -140875,77 +133443,47 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 20 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1440: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 stderr: +1.1: syntax error, unexpected end of input +input: | 1 2 +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: Starting parse Entering state 0 Stack now 0 @@ -140960,96 +133498,149 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: $PREPARSER ./calc input -stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token number (2) +Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1435: cat stderr +input: +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1438: cat stderr +./calc.at:1440: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1435: $PREPARSER ./calc input +input: + | + | +1 +./calc.at:1438: $PREPARSER ./calc input +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1440: $PREPARSER ./calc input -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1455: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1441: cat stderr stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.1-46: error: 4444 != 1 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Stack now 0 8 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) Stack now 0 -Cleanup: discarding lookahead token number (2) +Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -./calc.at:1441: "$PERL" -pi -e 'use strict; +input: + | + | +1 +./calc.at:1440: $PREPARSER ./calc input + | 1//2 +./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -141059,7 +133650,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1441: $PREPARSER ./calc input stderr: +stderr: +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -141074,74 +133677,47 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1438: cat stderr ./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1441: cat stderr stderr: Starting parse Entering state 0 @@ -141157,76 +133733,59 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1438: $PREPARSER ./calc /dev/null +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: - | error -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1445: "$PERL" -pi -e 'use strict; +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1435: cat stderr +stderr: +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -141236,6 +133795,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1441: cat stderr +input: + | (!!) + (1 2) = 1 +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1440: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: + | error +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1440: $PREPARSER ./calc /dev/null stderr: Starting parse Entering state 0 @@ -141246,8 +133824,24 @@ Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: cat stderr -./calc.at:1440: "$PERL" -pi -e 'use strict; +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -141257,7 +133851,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 stderr: +./calc.at:1435: cat stderr Starting parse Entering state 0 Stack now 0 @@ -141267,10 +133870,7 @@ Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 
0 input: -./calc.at:1440: cat stderr - | 1//2 -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1441: "$PERL" -pi -e 'use strict; +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -141280,70 +133880,59 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: + | (- *) + (1 2) = 1 +./calc.at:1435: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Stack now 0 8 -Error: popping nterm exp (1) -Stack now 0 -Cleanup: discarding lookahead token '/' () +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) Stack now 0 - | (#) + (#) = 2222 -./calc.at:1440: $PREPARSER ./calc input +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1438: cat stderr +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1438: $PREPARSER ./calc input ./calc.at:1441: cat stderr -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Stack now 0 8 -Error: popping nterm exp (1) -Stack now 0 -Cleanup: discarding lookahead token '/' () -Stack now 0 +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1440: cat stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1440: $PREPARSER ./calc input +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +input: | 1 = 2 = 3 +./calc.at:1441: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 @@ -141353,517 +133942,298 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 Stack now 0 8 20 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> 
$$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 -Stack now 0 1 +Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +Shifting token error (1.7-18: ) Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) Entering state 25 -Stack now 0 4 11 25 +Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) Entering state 4 Stack now 0 8 20 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) Stack now 0 8 20 4 -Shifting token error (1.1-8: ) +Shifting token error (1.23: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' (1.30: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 
-Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Stack now 0 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 4 12 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1445: cat stderr -./calc.at:1441: cat stderr -./calc.at:1440: cat stderr -input: -input: - | error - | - | +1 -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1445: $PREPARSER ./calc input -input: -stderr: - | (1 + #) = 1111 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -stderr: -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -Stack now 0 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 25 -Stack now 0 4 11 25 +Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -141880,26 +134250,7 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -Stack now 0 -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -141910,417 +134261,298 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: cat stderr -./calc.at:1441: $PREPARSER ./calc /dev/null -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 
=~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: cat stderr -./calc.at:1440: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -input: -input: - | 1 = 2 = 3 -./calc.at:1445: $PREPARSER ./calc input - | (# + 1) = 1111 -./calc.at:1440: $PREPARSER ./calc input -stderr: -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 -Stack now 0 1 +Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Stack now 0 8 + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Stack now 0 8 18 -Error: popping token '=' () +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' 
(1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1) -Stack now 0 -Cleanup: discarding lookahead token '=' () -Stack now 0 -stderr: -./calc.at:1441: cat stderr -Starting parse -Entering state 0 -Stack now 0 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) Entering state 4 -Stack now 0 4 +Stack now 0 8 20 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 25 -Stack now 0 4 11 25 +Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 
-Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Stack now 0 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Stack now 0 8 + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 8 18 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 4 12 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Stack now 0 8 18 -Error: popping token '=' () -Stack now 0 8 -Error: popping nterm exp (1) -Stack now 0 -Cleanup: discarding lookahead token '=' () -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding 
token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 25 -Stack now 0 4 11 25 +Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 
@@ -142337,9 +134569,8 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1440: "$PERL" -pi -e 'use strict; +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -142350,16 +134581,8 @@ }eg ' expout || exit 77 stderr: -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -142677,10 +134900,6 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1440: cat stderr -./calc.at:1445: cat stderr Starting parse Entering state 0 Stack now 0 @@ -142998,14 +135217,47 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -input: - | - | +1 -./calc.at:1445: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -./calc.at:1440: $PREPARSER ./calc input -./calc.at:1441: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -143015,35 +135267,70 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1435: cat stderr +input: +./calc.at:1440: cat stderr + | (* *) + (*) + (*) stderr: +./calc.at:1435: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next 
token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '+' () +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | (!!) + (1 2) = 1 +./calc.at:1440: $PREPARSER ./calc input +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1441: cat stderr Starting parse Entering state 0 Stack now 0 @@ -143053,96 +135340,121 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -143161,33 +135473,21 @@ Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1440: 
sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Stack now 0 -Cleanup: discarding lookahead token '+' () -Stack now 0 +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1438: cat stderr +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -input: Starting parse Entering state 0 Stack now 0 @@ -143197,96 +135497,121 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -143303,9 +135628,8 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) 
Cleanup: popping nterm input (1.1-2.0: ) - | (!!) + (1 2) = 1 -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1445: "$PERL" -pi -e 'use strict; +input: +./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -143315,6 +135639,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (!!) + (1 2) = 1 +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1441: cat stderr ./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -143325,7 +135652,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1440: cat stderr +./calc.at:1435: cat stderr stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -143466,9 +135796,15 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: cat stderr -./calc.at:1440: cat stderr +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1435: $PREPARSER ./calc input +input: + | + | +1 +stderr: +./calc.at:1441: $PREPARSER ./calc input +input: stderr: Starting parse Entering state 0 @@ -143610,40 +135946,9 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1445: $PREPARSER ./calc /dev/null -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input () -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () -Stack now 0 - | (1 + 1) / (1 - 1) -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | (- *) + (1 2) = 1 ./calc.at:1440: $PREPARSER ./calc input -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1441: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input () -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () -Stack now 0 +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -143654,113 +135959,120 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 -Stack now 0 8 22 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Stack now 0 8 22 4 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 -Stack now 0 8 22 4 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 -Stack now 0 8 22 4 12 19 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering 
state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 Next token is token '\n' (1.18-2.0: ) @@ -143768,7 +136080,7 @@ Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) + $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -143786,10 +136098,45 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (- *) + (1 2) = 1 Starting parse Entering state 0 Stack now 0 @@ -143799,113 +136146,120 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 +Next token is token 
'*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 -Stack now 0 8 22 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Stack now 0 8 22 4 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 -Stack now 0 8 22 4 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 -Stack now 0 8 22 4 12 19 1 +Stack now 0 8 18 1 Reducing stack by 
rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 Next token is token '\n' (1.18-2.0: ) @@ -143913,7 +136267,7 @@ Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) + $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -143931,8 +136285,35 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1445: "$PERL" -pi -e 'use strict; +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +stderr: +stderr: +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -143942,7 +136323,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1440: "$PERL" -pi -e 'use strict; +stdout: +./types.at:139: $PREPARSER ./test +./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -143952,8 +136335,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1438: cat stderr stderr: -./calc.at:1445: cat stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: cat stderr +./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1441: cat stderr +input: + | (* *) + (*) + (*) +./calc.at:1440: $PREPARSER ./calc input +./calc.at:1441: $PREPARSER ./calc /dev/null +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack 
now 0 +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -144102,10 +136507,162 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: cat stderr +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: Starting parse Entering state 0 Stack now 0 @@ -144254,662 +136811,617 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1445: $PREPARSER ./calc input -530. calc.at:1440: ok -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1441: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +445. types.at:139: stderr: + ok +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error () +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token ')' () -Shifting token ')' () +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Stack now 0 8 20 4 12 -Error: popping nterm exp (3) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token ')' () -Shifting token ')' () +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' () +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) Entering state 11 Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.16: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' () +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 + !- ++ +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +stderr: +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: cat stderr +./calc.at:1440: cat stderr +./calc.at:1441: cat stderr +stderr: +input: +input: +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1440: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1441: $PREPARSER ./calc input + | (* *) + (*) + (*) +./calc.at:1438: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 20 4 1 +Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 8 20 4 12 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 -Stack now 0 8 20 4 12 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 20 4 12 21 1 +Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 -Stack now 0 8 20 4 12 21 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '*' () +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' () -Stack now 0 8 20 4 12 -Error: popping nterm exp (2) -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 Stack now 0 8 20 29 -Reading a token -Next token is token '=' () +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token end of input () -Shifting token end of input () -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () - -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (* *) + (*) + (*) -./calc.at:1441: $PREPARSER ./calc input +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error () +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 20 Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 29 Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '+' () +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) Entering state 12 Stack now 0 8 20 4 12 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 20 Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) Entering state 29 Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '+' () +Next token is token '+' (1.17: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) Entering state 12 Stack now 0 8 20 4 12 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) Entering state 20 Stack now 0 8 20 4 12 20 Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) Stack now 0 8 20 4 12 -Error: popping nterm exp (3) +Error: popping nterm exp (1.7-15: 3) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.7-18: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' () +Next token is token '+' (1.20: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.23: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.23-25: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.23-27: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' () +Next token is token '+' (1.30: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' 
(1.30: ) +Shifting token '+' (1.30: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) Entering state 21 Stack now 0 8 20 4 12 21 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 Stack now 0 8 20 4 12 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) Entering state 30 Stack now 0 8 20 4 12 21 30 Reading a token -Next token is token '*' () +Next token is token '*' (1.39: ) Reducing stack by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 Stack now 0 8 20 4 12 -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) Entering state 21 Stack now 0 8 20 4 12 21 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' () +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) Stack now 0 8 20 4 12 -Error: popping nterm exp (2) +Error: popping nterm exp (1.33-37: 2) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' () +Next token is token '=' (1.44: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 Stack now 0 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -stderr: +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Stack now 0 @@ -145061,7 +137573,8 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1445: "$PERL" -pi -e 'use strict; +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 
+./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -145072,12 +137585,412 @@ }eg ' expout || exit 77 ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: cat stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1440: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +stderr: +./calc.at:1435: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 @@ -145225,307 +138138,695 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1440: $PREPARSER ./calc input +input: + | (#) + (#) = 2222 +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1438: cat stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1438: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +541. calc.at:1457: testing Calculator C++ parse.error=custom ... +./calc.at:1457: mv calc.y.tmp calc.y + +stderr: +./calc.at:1457: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y input: +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (!!) + (1 2) = 1 -./calc.at:1445: $PREPARSER ./calc input +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1441: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1438: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +./calc.at:1435: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' () -Shifting token '!' () +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) Entering state 5 Stack now 0 4 5 Reading a token -Next token is token '!' () -Shifting token '!' () +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) Entering state 15 Stack now 0 4 5 15 Reducing stack by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token number (2) -Error: discarding token number (2) -Error: popping token error () +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' () +Next token is token '=' (1.14: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) 
+1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: cat stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' () -Shifting token '!' () +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) Entering state 5 Stack now 0 4 5 Reading a token -Next token is token '!' () -Shifting token '!' () +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) Entering state 15 Stack now 0 4 5 15 Reducing stack by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token number (2) -Error: discarding token number (2) -Error: popping token error () +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' () +Next token is token '=' (1.14: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) 
+1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1441: cat stderr +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1445: "$PERL" -pi -e 'use strict; + | (1 + #) = 1111 +./calc.at:1435: $PREPARSER ./calc input +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.6: syntax error: invalid character: '#' +input: + | 1 + 2 * 3 + !- ++ +input: +./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -145535,8 +138836,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1445: cat stderr +./calc.at:1438: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1440: $PREPARSER ./calc input +./calc.at:1441: cat stderr stderr: +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -145607,22 +138921,146 @@ Entering state 5 Stack now 0 8 20 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) + $2 = token '-' (1.14: ) Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -538. calc.at:1453: testing Calculator C++ %locations parse.lac=full parse.error=verbose ... 
-./calc.at:1453: mv calc.y.tmp calc.y - +stderr: +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +./calc.at:1435: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (- *) + 
(1 2) = 1 +./calc.at:1441: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -145694,665 +139132,1007 @@ Entering state 5 Stack now 0 8 20 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) + $2 = token '-' (1.14: ) Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1453: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1441: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) Entering state 2 Stack now 0 4 2 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) Entering state 9 Stack now 0 4 2 9 Reducing stack by rule 15 (line 106): - $1 = token '-' () - $2 = token error () + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.10-12: ) 
Entering state 11 Stack now 0 8 20 4 11 -Next token is token number (2) -Error: discarding token number (2) -Error: popping token error () +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' () +Next token is token '=' (1.15: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -input: -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1441: $PREPARSER ./calc input +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) 
+Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) Entering state 2 Stack now 0 4 2 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) Entering state 9 Stack now 0 4 2 9 Reducing stack by rule 15 (line 106): - $1 = token '-' () - $2 = token error () + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack 
now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + | (# + 1) = 1111 +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1438: cat stderr +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1441: cat stderr +./calc.at:1440: cat stderr +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + | (1 + #) = 1111 + | (* *) + (*) + (*) +input: +./calc.at:1441: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1457: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1440: $PREPARSER ./calc input +stderr: +stderr: +1.2: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 
+Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Stack now 0 8 20 4 -Shifting token error () +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token number (2) -Error: discarding token number (2) -Error: popping token error () +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' () +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 8 20 29 21 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: 
) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Stack now 0 4 12 20 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 8 20 29 21 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting 
token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1445: cat stderr -./calc.at:1441: cat stderr -input: -input: - | (* *) + (*) + (*) - | (#) + (#) = 2222 -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1445: $PREPARSER ./calc input -stderr: +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' () +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.16: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -146476,162 +140256,27 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1453: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +stdout: +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: $PREPARSER ./test stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '\n' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input () -Shifting token end of input () -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () +./calc.at:1435: cat stderr +stderr: +./calc.at:1441: cat stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1435: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -146755,17 +140400,11 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1441: "$PERL" -pi -e 'use strict; +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 + 2 * 3 + !+ ++ +stderr: +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -146775,179 +140414,201 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1445: cat stderr -input: -./calc.at:1441: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1445: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1440: cat stderr +1.6: syntax error: invalid character: '#' stderr: -input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '+' () +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 Stack now 0 8 20 29 -Next token is token '+' () +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '!' () -Shifting token '!' () +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) Entering state 5 Stack now 0 8 20 5 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) Entering state 14 Stack now 0 8 20 5 14 Reducing stack by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) Stack now 0 8 20 -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) - | (1 + #) = 1111 -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1441: $PREPARSER ./calc input +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (# + 1) = 1111 +./calc.at:1440: $PREPARSER ./calc input +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1438: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '+' () +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 Stack now 0 8 20 29 -Next token is token '+' () +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp 
(1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '!' () -Shifting token '!' () +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) Entering state 5 Stack now 0 8 20 5 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) Entering state 14 Stack now 0 8 20 5 14 Reducing stack by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) Stack now 0 8 20 -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1445: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) stderr: +./calc.at:1441: $EGREP -c -v 'Return for a new token:|LAC:' stderr Starting parse Entering state 0 Stack now 0 @@ -146957,35 +140618,132 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -1.6: syntax error: invalid character: 
'#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1435: cat stderr +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Stack now 0 4 -Shifting token error (1.2-6: ) +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) Entering state 11 Stack now 0 4 11 Reading a token @@ -146995,7 +140753,7 @@ Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) + $2 = token error (1.1-6: ) $3 = token ')' (1.7: ) -> $$ = nterm exp (1.1-7: 1111) Entering state 8 @@ -147047,9 +140805,12 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: + | (1 + #) = 1111 +./calc.at:1438: $PREPARSER ./calc input input: + | 1 + 2 * 3 + !- ++ +./calc.at:1441: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 @@ -147149,186 +140910,89 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | 1 + 2 * 3 + !- ++ -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' () -Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Stack now 0 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' 
() - $2 = token '-' () -Stack now 0 8 20 -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '+' () +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 Stack now 0 8 20 29 -Next token is token '+' () +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '!' () -Shifting token '!' () +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) Entering state 5 Stack now 0 8 20 5 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) Entering state 13 Stack now 0 8 20 5 13 Reducing stack by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) Stack now 0 8 20 -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1441: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1445: "$PERL" -pi -e 'use strict; +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -147339,6 +141003,8 @@ }eg ' expout || exit 77 stderr: +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: Starting parse Entering state 0 Stack now 0 @@ -147348,32 +141014,35 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error (1.1-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.1-6: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token @@ -147383,7 +141052,7 @@ Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) + $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) -> $$ = nterm exp (1.1-7: 1111) Entering state 8 @@ -147435,108 +141104,101 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: cat stderr + | (1 + 1) / (1 - 1) stderr: +./calc.at:1435: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding 
token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | (#) + (#) = 2222 -./calc.at:1445: $PREPARSER ./calc input +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1440: cat stderr +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: ./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -147547,260 +141209,276 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1438: cat stderr +input: +1.11-17: error: null divisor +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1441: cat stderr +1.11-17: error: null divisor + | (1 + # + 1) = 1111 +446. types.at:139: ok +input: +./calc.at:1440: $PREPARSER ./calc input +input: + | (#) + (#) = 2222 +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (# + 1) = 1111 +./calc.at:1438: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token () -Error: 
discarding token invalid token () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 -Stack now 0 8 20 4 11 25 +Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' () -Shifting token '=' () +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -input: -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1441: $PREPARSER ./calc input +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1435: cat stderr +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +1.2: syntax error: invalid 
character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Stack now 0 8 20 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.1-8: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' () +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack 
now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -stderr: +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -147916,7 +141594,7 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + stderr: Starting parse Entering state 0 @@ -147927,96 +141605,212 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 -Stack now 0 4 
1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +527. calc.at:1435: ok +stderr: +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Stack now 0 4 -Shifting token error (1.2-6: ) +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) Stack now 0 4 -Shifting token error (1.2-8: ) +Shifting token error (1.1-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token error (1.1-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error 
(1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -148033,17 +141827,7 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1445: cat stderr +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -148054,115 +141838,117 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1441: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1445: $PREPARSER ./calc input -input: - | (1 + 1) / (1 - 1) stderr: -./calc.at:1441: $PREPARSER ./calc input +./calc.at:1440: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 4 12 20 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Stack now 0 4 12 -Error: popping nterm exp (1) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.1-4: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.1-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp 
(1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + 1) / (1 - 1) +./calc.at:1440: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -148305,106 +142091,9 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 4 12 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Stack now 0 4 12 -Error: popping nterm exp (1) -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' () -Shifting token 
'\n' () -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input () -Shifting token end of input () -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: cat stderr +./calc.at:1438: cat stderr stderr: Starting parse Entering state 0 @@ -148547,8 +142236,14 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1445: "$PERL" -pi -e 'use strict; +input: + | (1 + # + 1) = 1111 +./calc.at:1438: $PREPARSER ./calc input +input: + | (1 + #) = 1111 + +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -148558,8 +142253,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +stderr: stdout: -./calc.at:1443: "$PERL" -ne ' +./calc.at:1445: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -148568,2274 +142265,3014 @@ || /\s$/ # No tabs. || /\t/ - )' calc.cc calc.hh + )' calc.cc -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1445: cat stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1441: cat stderr -./calc.at:1443: $PREPARSER ./calc input -input: - | (# + 1) = 1111 -./calc.at:1445: $PREPARSER ./calc input -531. 
calc.at:1441: ok stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' () -Error: discarding token '+' () -Error: popping token error () +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Stack now 0 4 -Shifting token error () +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) 
+Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token number (1) -Error: discarding token number (1) -Error: popping token error () +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-8: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1440: cat stderr +./calc.at:1438: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' () -Error: discarding token '+' () -Error: popping token error () +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Stack now 0 4 -Shifting token error () +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) 
+Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token number (1) -Error: discarding token number (1) -Error: popping token error () +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-8: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +530. 
calc.at:1440: ok + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +542. calc.at:1458: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1458: mv calc.y.tmp calc.y + +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1441: cat stderr +./calc.at:1438: cat stderr +./calc.at:1458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: +input: + | (1 + 1) / (1 - 1) + | (# + 1) = 1111 +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1438: $PREPARSER ./calc input stderr: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' () Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 Stack now 0 8 20 29 -Next token is token '=' (1.11: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Stack now 0 
8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token number (7) +Shifting token number (7) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) + $1 = token number (7) +-> $$ = nterm exp (7) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.14-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 6 8 20 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Stack now 0 6 8 20 29 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 6 8 20 29 21 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 20 29 21 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 10 Stack now 0 6 8 20 29 21 2 10 Reading a token -Next token is token '=' (2.12: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) Entering state 30 Stack now 0 6 8 20 29 21 30 -Next token is token '=' (2.12: ) +Next token is token '=' () Reducing stack by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) + $1 = 
nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) Entering state 29 Stack now 0 6 8 20 29 -Next token is token '=' (2.12: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) Entering state 8 Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) +Next token is token number (5) +Shifting token number (5) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) + $1 = token number (5) +-> $$ = nterm exp (5) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (2.16-3.0: ) +Next token is token '\n' () Reducing stack by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' (2.16-3.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 2 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 2 10 Reading a token -Next token is token '^' 
(4.3: ) -Shifting token '^' (4.3: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 2 10 23 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 2 10 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Stack now 0 6 2 10 23 32 Reading a token -Next token is token '=' (4.6: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 2 10 -Next token is token '=' (4.6: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (4.10-5.0: ) +Next token is token '\n' () Reducing stack by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' (4.10-5.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 6 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 4 2 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 
Stack now 0 6 4 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 4 2 10 Reading a token -Next token is token ')' (5.4: ) +Next token is token ')' () Reducing stack by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 12 Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 Stack now 0 6 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 8 23 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Stack now 0 6 8 23 32 Reading a token -Next token is token '=' (5.8: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (5.11-6.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) + $1 = token 
'\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 2 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 2 2 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 2 2 2 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 2 2 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 2 2 2 10 Reading a token -Next token is token '=' (7.6: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 10 Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 2 10 -Next token is token '=' (7.6: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (7.10-8.0: ) +Next token is token '\n' () Reducing stack by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' (7.10-8.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm 
exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '-' (9.7: ) +Next token is token '-' () Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '=' (9.11: ) +Next token is token '=' () Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) Entering state 8 Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token number (4) +Shifting token number (4) Entering state 1 Stack now 0 6 
8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) + $1 = token number (4) +-> $$ = nterm exp (4) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (9.15-10.0: ) +Next token is token '\n' () Reducing stack by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' (9.15-10.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 6 8 19 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 19 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 Stack now 0 6 8 19 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 19 4 12 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 28 Stack now 0 6 8 19 4 12 19 28 Reading a token -Next token is token ')' (10.11: ) +Next token is token ')' () Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) Entering state 12 Stack now 0 6 8 19 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) +Next token is token ')' () 
+Shifting token ')' () Entering state 26 Stack now 0 6 8 19 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '=' (10.13: ) +Next token is token '=' () Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (10.16-11.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 8 23 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 23 1 Reducing stack by rule 5 (line 79): - $1 = token 
number (12.3: 2) --> $$ = nterm exp (12.3: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Stack now 0 6 8 23 32 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 8 23 32 23 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 23 32 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 32 Stack now 0 6 8 23 32 23 32 Reading a token -Next token is token '=' (12.7: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) Entering state 32 Stack now 0 6 8 23 32 -Next token is token '=' (12.7: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) +Next token is token number (256) +Shifting token number (256) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) + $1 = token number (256) +-> $$ = nterm exp (256) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (12.12-13.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 6 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 Stack now 0 6 4 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) +Next token is token '^' () 
+Shifting token '^' () Entering state 23 Stack now 0 6 4 12 23 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 4 12 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Stack now 0 6 4 12 23 32 Reading a token -Next token is token ')' (13.5: ) +Next token is token ')' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) Entering state 12 Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 Stack now 0 6 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 8 23 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 32 Stack now 0 6 8 23 32 Reading a token -Next token is token '=' (13.9: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) Entering state 8 Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (64) +Shifting token number (64) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (64) +-> $$ = nterm exp (64) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm input () + $2 
= nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of input () +Cleanup: popping nterm input () stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null 
divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting 
parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack 
now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' () Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 Stack now 0 8 20 29 -Next token is token '=' (1.11: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token number (7) +Shifting token number (7) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) + $1 = token number (7) +-> $$ = nterm exp (7) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.14-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input 
() Entering state 6 Stack now 0 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 6 8 20 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Stack now 0 6 8 20 29 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 6 8 20 29 21 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 20 29 21 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 10 Stack now 0 6 8 20 29 21 2 10 Reading a token -Next token is token '=' (2.12: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) Entering state 30 Stack now 0 6 8 20 29 21 30 -Next token is token '=' (2.12: ) +Next token is token '=' () Reducing stack by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) Entering state 29 Stack now 0 6 8 20 29 -Next token is token '=' (2.12: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) Entering state 8 Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) +Next token is token number (5) +Shifting token number (5) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) + $1 = token number (5) +-> $$ = nterm exp (5) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (2.16-3.0: ) +Next token is token '\n' () Reducing stack by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) + $1 = token '-' () + $2 = nterm exp (5) 
+-> $$ = nterm exp (-5) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' (2.16-3.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 2 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 2 10 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 2 10 23 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 2 10 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Stack now 0 6 2 10 23 32 Reading a token -Next token is token '=' (4.6: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 2 10 -Next token is token '=' (4.6: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) +Next token is token '-' () +Shifting token 
'-' () Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (4.10-5.0: ) +Next token is token '\n' () Reducing stack by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' (4.10-5.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 6 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 4 2 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 4 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 4 2 10 Reading a token -Next token is token ')' (5.4: ) +Next token is token ')' () Reducing stack by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 12 Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 Stack now 0 6 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 8 23 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) + $1 = token number (2) +-> $$ = 
nterm exp (2) Entering state 32 Stack now 0 6 8 23 32 Reading a token -Next token is token '=' (5.8: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (5.11-6.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 2 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 2 2 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 2 2 2 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 2 2 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 2 2 2 10 Reading a token -Next token is token '=' (7.6: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> 
$$ = nterm exp (-1) Entering state 10 Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 2 10 -Next token is token '=' (7.6: ) +Next token is token '=' () Reducing stack by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (7.10-8.0: ) +Next token is token '\n' () Reducing stack by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' (7.10-8.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '-' (9.3: ) 
-Shifting token '-' (9.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '-' (9.7: ) +Next token is token '-' () Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) Entering state 8 Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '=' (9.11: ) +Next token is token '=' () Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) Entering state 8 Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 6 8 18 2 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token number (4) +Shifting token number (4) Entering state 1 Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) + $1 = token number (4) +-> $$ = nterm exp (4) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (9.15-10.0: ) +Next token is token '\n' () Reducing stack by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) Entering state 27 Stack now 0 6 8 18 27 -Next token is token '\n' (9.15-10.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) + $1 
= nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Stack now 0 6 8 19 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 6 8 19 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 19 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 Stack now 0 6 8 19 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 19 4 12 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 28 Stack now 0 6 8 19 4 12 19 28 Reading a token -Next token is token ')' (10.11: ) +Next token is token ')' () Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) Entering state 12 Stack now 0 6 8 19 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 Stack now 0 6 8 19 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 28 Stack now 0 6 8 19 28 Reading a token -Next token is token '=' (10.13: ) +Next token is token '=' () Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (10.16-11.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' 
(10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Stack now 0 6 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 1 Reducing stack by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 8 23 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 8 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Stack now 0 6 8 23 32 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 8 23 32 23 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 23 32 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 32 Stack now 0 6 8 23 32 23 32 Reading a token -Next token is token '=' (12.7: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) Entering state 32 Stack now 0 6 8 23 32 -Next token is token '=' (12.7: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 Stack now 0 6 8 -Next token is 
token '=' (12.7: ) -Shifting token '=' (12.7: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) +Next token is token number (256) +Shifting token number (256) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) + $1 = token number (256) +-> $$ = nterm exp (256) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (12.12-13.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 6 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 Stack now 0 6 4 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 4 12 23 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 6 4 12 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 32 Stack now 0 6 4 12 23 32 Reading a token -Next token is token ')' (13.5: ) +Next token is token ')' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) Entering state 12 Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 Stack now 0 6 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) Entering state 8 Stack now 0 6 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) +Next token is token '^' () +Shifting token '^' () Entering state 23 Stack now 0 6 8 23 Reading a token 
-Next token is token number (13.7: 3) -Shifting token number (13.7: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 6 8 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 32 Stack now 0 6 8 23 32 Reading a token -Next token is token '=' (13.9: ) +Next token is token '=' () Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) Entering state 8 Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 6 8 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (64) +Shifting token number (64) Entering state 1 Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (64) +-> $$ = nterm exp (64) Entering state 27 Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () Entering state 17 Stack now 0 6 17 Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1443: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1445: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1445: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stdout: +./types.at:139: $PREPARSER ./test +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) 
+Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -150845,173 +145282,194 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1445: cat stderr -input: | 1 2 -./calc.at:1443: $PREPARSER ./calc input -input: - | (1 + # + 1) = 1111 -stderr: ./calc.at:1445: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1438: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) +Cleanup: discarding lookahead token number (2) Stack now 0 -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) +Cleanup: discarding lookahead token number (2) Stack now 0 +449. types.at:139: 529. calc.at:1438: ok + ok +543. calc.at:1459: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} parse.lac=full ... +./calc.at:1441: cat stderr +./calc.at:1459: mv calc.y.tmp calc.y + +input: + | (1 + # + 1) = 1111 +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1459: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y + stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Stack now 0 4 12 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) Stack now 0 4 12 -Error: popping nterm exp (1) +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' () -Error: discarding token '+' () -Error: popping token error () +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-8: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token number (1) -Error: discarding token number (1) -Error: popping token error () +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-10: ) Entering state 11 Stack now 0 4 11 
Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1443: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -151021,198 +145479,124 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1443: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Stack now 0 4 12 20 Reading a token -syntax error: invalid character: '#' -Next 
token is token error () -Error: popping token '+' () +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) Stack now 0 4 12 -Error: popping nterm exp (1) +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' () -Error: discarding token '+' () -Error: popping token error () +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-8: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token number (1) -Error: discarding token number (1) -Error: popping token error () +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-10: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () 
-Cleanup: popping nterm input () -input: - | 1//2 -./calc.at:1443: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1445: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -input: - | (1 + 1) / (1 - 1) -./calc.at:1443: "$PERL" -pi -e 'use strict; +./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -151222,310 +145606,383 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +544. calc.at:1468: testing Calculator glr.cc ... 
+./calc.at:1441: cat stderr +input: + | 1//2 ./calc.at:1445: $PREPARSER ./calc input +./calc.at:1458: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +input: + | (1 + 1) / (1 - 1) +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1468: mv calc.y.tmp calc.y + stderr: -./calc.at:1443: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Stack now 0 4 12 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 Stack now 0 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 29 Stack now 0 4 12 20 29 Reading a token -Next token is token ')' () +Next token is token ')' (1.7: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) Entering state 12 Stack now 0 4 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' () -Shifting token '/' () +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) Entering state 22 Stack now 0 8 22 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 Stack now 0 8 22 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 Stack now 0 8 22 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 Stack now 0 8 22 4 12 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) Entering state 19 Stack now 0 8 22 4 12 19 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 22 4 12 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 22 4 12 19 28 Reading a token -Next token is token ')' () +Next token is token ')' (1.17: ) Reducing stack by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm 
exp (1) --> $$ = nterm exp (0) + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 Stack now 0 8 22 4 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 26 Stack now 0 8 22 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) Entering state 31 Stack now 0 8 22 31 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -539. calc.at:1454: testing Calculator C++ %locations parse.lac=full parse.error=detailed ... -./calc.at:1454: mv calc.y.tmp calc.y - - | error -./calc.at:1443: $PREPARSER ./calc input -./calc.at:1454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 4 -Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' () +Stack now 0 8 +Error: popping nterm exp (1) +Stack now 0 +Cleanup: discarding lookahead token '/' () +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Stack now 0 4 12 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 Stack now 0 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 29 Stack now 0 4 12 20 29 Reading a token -Next token is token ')' () +Next token is token ')' (1.7: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) Entering state 12 Stack now 0 4 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' () -Shifting token '/' () +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) Entering state 22 Stack now 0 8 22 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 Stack now 0 8 22 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 Stack now 0 8 22 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 Stack now 0 8 22 4 12 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) Entering state 19 Stack now 0 8 22 4 12 19 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 22 4 12 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 22 4 12 19 28 Reading a token -Next token is token ')' () +Next token is token ')' (1.17: ) Reducing stack by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 Stack now 0 8 22 4 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering 
state 26 Stack now 0 8 22 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) Entering state 31 Stack now 0 8 22 31 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1459: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +545. calc.at:1469: testing Calculator glr2.cc ... +./calc.at:1469: mv calc.y.tmp calc.y + +./calc.at:1469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +546. calc.at:1476: testing Calculator C++ %glr-parser ... +./calc.at:1476: mv calc.y.tmp calc.y + +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Stack now 0 8 +Error: popping nterm exp (1) Stack now 0 -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: discarding lookahead token '/' () +Stack now 0 +./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stdout: +./calc.at:1441: cat stderr +./types.at:139: $PREPARSER ./test ./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -151537,16 +145994,37 @@ }eg ' expout || exit 77 stderr: +531. calc.at:1441: ok +./calc.at:1445: cat stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | error +./calc.at:1445: $PREPARSER ./calc input +stderr: +448. types.at:139: ok Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () Stack now 0 -./calc.at:1445: cat stderr -./calc.at:1443: "$PERL" -pi -e 'use strict; +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +Stack now 0 +./calc.at:1468: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1469: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -151556,95 +146034,101 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -533. calc.at:1445: ok -./calc.at:1443: cat stderr - +./calc.at:1445: cat stderr +./calc.at:1476: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS input: +547. calc.at:1476: testing Calculator glr2.cc ... 
+./calc.at:1476: mv calc.y.tmp calc.y + | 1 = 2 = 3 -./calc.at:1443: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) Stack now 0 8 18 -Error: popping token '=' (1.3: ) +Error: popping token '=' () Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Error: popping nterm exp (1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token '=' () Stack now 0 -./calc.at:1454: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +548. calc.at:1477: testing Calculator C++ %glr-parser %locations ... 
+./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: mv calc.y.tmp calc.y + stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) Stack now 0 8 18 -Error: popping token '=' (1.3: ) +Error: popping token '=' () Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Error: popping nterm exp (1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token '=' () Stack now 0 -./calc.at:1443: "$PERL" -pi -e 'use strict; +./calc.at:1477: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -151654,65 +146138,67 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1443: cat stderr +./calc.at:1445: cat stderr input: | | +1 -./calc.at:1443: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1476: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Stack now 0 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) +Cleanup: discarding lookahead token '+' () Stack now 0 -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Stack now 0 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line 
(1.1-2.0: ) + $1 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) +Cleanup: discarding lookahead token '+' () Stack now 0 -./calc.at:1443: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -151722,32 +146208,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1443: cat stderr -./calc.at:1443: $PREPARSER ./calc /dev/null +stdout: +./calc.at:1445: cat stderr +./types.at:139: $PREPARSER ./test +./calc.at:1445: $PREPARSER ./calc /dev/null +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) +Next token is token end of input () +syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () Stack now 0 -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +stdout: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) +Next token is token end of input () +syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () Stack now 0 -540. calc.at:1455: testing Calculator C++ %locations parse.lac=full parse.error=detailed parse.trace ... -./calc.at:1455: mv calc.y.tmp calc.y - -./calc.at:1455: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1443: "$PERL" -pi -e 'use strict; +./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -151757,649 +146248,653 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1443: cat stderr +450. types.at:139: ok +./calc.at:1477: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1445: cat stderr +447. types.at:139: ok input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1443: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc input + stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '+' (1.13: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) Entering state 12 Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '+' (1.17: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) Entering state 12 Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 4 12 20 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' () Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) +Error: popping nterm exp (3) Stack now 0 8 20 4 -Shifting token error (1.7-18: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.20: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.23: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.23-25: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.23-27: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.30: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token 
'+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 8 20 4 12 21 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 8 20 4 12 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 30 Stack now 0 8 20 4 12 21 30 Reading a token -Next token is token '*' (1.39: ) +Next token is token '*' () Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 12 Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 8 20 4 12 21 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' () Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) +Error: popping nterm exp (2) Stack now 0 8 20 4 -Shifting token error (1.33-41: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.33-41: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.44: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token 
'(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '+' (1.13: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) Entering state 12 Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '+' (1.17: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) Entering state 12 Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 4 12 20 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) +Error: popping nterm exp (3) Stack now 0 8 20 4 -Shifting token error (1.7-18: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.20: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.23: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.23-25: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.23-27: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.30: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = 
nterm exp (3333) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 8 20 4 12 21 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 8 20 4 12 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 30 Stack now 0 8 20 4 12 21 30 Reading a token -Next token is token '*' (1.39: ) +Next token is token '*' () Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 12 Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 8 20 4 12 21 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' () Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) +Error: popping nterm exp (2) Stack now 0 8 20 4 -Shifting token error (1.33-41: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.33-41: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.44: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1455: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1443: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -152409,294 +146904,302 @@ : "syntax error, unexpected $unexp"; }eg ' expout || 
exit 77 -./calc.at:1443: cat stderr +./calc.at:1445: cat stderr input: +549. calc.at:1477: testing Calculator glr2.cc %locations ... | (!!) + (1 2) = 1 -./calc.at:1443: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1477: mv calc.y.tmp calc.y + +./calc.at:1477: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) +Next token is token '!' () +Shifting token '!' () Entering state 5 Stack now 0 4 5 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) +Next token is token '!' () +Shifting token '!' () Entering state 15 Stack now 0 4 5 15 Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) + $1 = token '!' () + $2 = token '!' () Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Stack now 0 8 20 4 -Shifting token error (1.9-11: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token number (2) +Error: discarding token number (2) +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.9-11: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' () Reducing stack by rule 
7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +550. calc.at:1478: testing Calculator C++ %glr-parser %locations api.location.type={Span} ... Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) +Next token is token '!' () +Shifting token '!' () Entering state 5 Stack now 0 4 5 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) +Next token is token '!' () +Shifting token '!' () Entering state 15 Stack now 0 4 5 15 Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) + $1 = token '!' () + $2 = token '!' 
() Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Stack now 0 8 20 4 -Shifting token error (1.9-11: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token number (2) +Error: discarding token number (2) +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.9-11: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm 
exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1478: mv calc.y.tmp calc.y + +./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -152706,310 +147209,311 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1443: cat stderr +./calc.at:1445: cat stderr input: | (- *) + (1 2) = 1 -./calc.at:1443: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 4 2 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 9 Stack now 0 4 2 9 Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) + $1 = token '-' () + $2 = token error () Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Stack now 0 8 20 4 -Shifting token error (1.10-12: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) +Next token is token number (2) +Error: discarding token number (2) +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.10-12: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.15: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 18 1 
Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1477: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) +Next token is token '-' () +Shifting token '-' () Entering state 2 Stack now 0 4 2 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 9 Stack now 0 4 2 9 Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) + $1 = token '-' () + $2 = token error () Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Stack now 0 8 20 4 -Shifting token error (1.10-12: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) +Next token is token number (2) +Error: discarding token number (2) +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.10-12: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.15: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 18 1 
Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -153019,316 +147523,317 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1443: cat stderr +./calc.at:1445: cat stderr input: | (* *) + (*) + (*) -./calc.at:1443: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.10: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.13: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.16: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.10: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.13: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.16: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -153338,341 +147843,381 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1443: cat stderr +./calc.at:1445: cat stderr input: | 1 + 2 * 3 + !+ ++ -./calc.at:1443: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' 
(1.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '+' (1.11: ) +Next token is token '+' () Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 Stack now 0 8 20 29 -Next token is token '+' (1.11: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) +Next token is token '!' () +Shifting token '!' () Entering state 5 Stack now 0 8 20 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) +Next token is token '+' () +Shifting token '+' () Entering state 14 Stack now 0 8 20 5 14 Reducing stack by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) + $1 = token '!' () + $2 = token '+' () Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +stderr: +stdout: +./calc.at:1453: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1453: $PREPARSER ./calc input +stderr: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '+' (1.11: ) +Next token is token '+' () Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 Stack now 0 8 20 29 -Next token is token '+' (1.11: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) +Next token is token '!' () +Shifting token '!' () Entering state 5 Stack now 0 8 20 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) +Next token is token '+' () +Shifting token '+' () Entering state 14 Stack now 0 8 20 5 14 Reducing stack by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) + $1 = token '!' 
() + $2 = token '+' () Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1443: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1445: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +./calc.at:1453: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: input: | 1 + 2 * 3 + !- ++ -./calc.at:1443: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc input + | 1 2 +./calc.at:1453: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '+' (1.11: ) +Next token is token '+' () Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 Stack now 0 8 20 29 -Next token is token '+' (1.11: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) +Next token is token '!' () +Shifting token '!' () Entering state 5 Stack now 0 8 20 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) +Next token is token '-' () +Shifting token '-' () Entering state 13 Stack now 0 8 20 5 13 Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $1 = token '!' 
() + $2 = token '-' () Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +stderr: +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error, unexpected number +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '+' (1.11: ) +Next token is token '+' () Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 Stack now 0 8 20 29 -Next token is token '+' (1.11: ) +Next token is token '+' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) +Next token is token '!' () +Shifting token '!' () Entering state 5 Stack now 0 8 20 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) +Next token is token '-' () +Shifting token '-' () Entering state 13 Stack now 0 8 20 5 13 Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $1 = token '!' 
() + $2 = token '-' () Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1443: "$PERL" -pi -e 'use strict; +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +stderr: +1.3: syntax error, unexpected number +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -153682,260 +148227,279 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1443: cat stderr +./calc.at:1445: cat stderr input: | (#) + (#) = 2222 -./calc.at:1443: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1453: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 4 -Shifting token error (1.1-2: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.1-8: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' () + $2 = token error () + $3 = 
token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 4 -Shifting token error (1.1-2: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) 
--> $$ = nterm exp (1.1-3: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 20 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 8 20 4 -Shifting token error (1.1-8: ) +Shifting token error () Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering 
state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input () +Cleanup: popping nterm input () +input: + | 1//2 +./calc.at:1453: $PREPARSER ./calc input +stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -153945,212 +148509,259 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1443: cat stderr +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1445: cat stderr +stderr: +./calc.at:1453: cat stderr input: +stdout: +./calc.at:1454: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + | (1 + #) = 1111 -./calc.at:1443: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 4 12 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Error: popping nterm exp (1) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is 
token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | error +./calc.at:1453: $PREPARSER ./calc input +input: +stderr: +1.1: syntax error, unexpected invalid token +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 stderr: +./calc.at:1454: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 4 12 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Error: popping nterm exp (1) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: 
discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: +1.1: syntax error, unexpected invalid token +./calc.at:1453: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -154160,206 +148771,306 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1443: cat stderr +stderr: +./calc.at:1453: cat stderr +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 = 2 = 3 +stderr: +./calc.at:1453: $PREPARSER ./calc input +stderr: +./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.7: syntax error, unexpected '=' +./calc.at:1445: cat stderr +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 2 +./calc.at:1454: $PREPARSER ./calc input +stderr: +1.3: syntax error, unexpected number +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +stderr: | (# + 1) = 1111 -./calc.at:1443: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc input +stderr: +1.7: syntax error, unexpected '=' +1.3: syntax error, unexpected number +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 4 -Shifting token error (1.1-2: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) +Next token is token '+' () +Error: discarding token '+' () +Error: popping token error () Stack now 0 4 -Shifting token error (1.1-4: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) +Next token is token number (1) +Error: discarding token number (1) +Error: popping token error () Stack now 0 4 -Shifting token error (1.1-6: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1111) +-> $$ = nterm 
exp (1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: +./calc.at:1453: cat stderr +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1454: cat stderr +./calc.at:1449: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc calc.hh + stderr: +input: + | + | +1 +./calc.at:1453: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 4 -Shifting token error (1.1-2: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) +Next token is token '+' () +Error: discarding token '+' () +Error: popping token error () Stack now 0 4 -Shifting token error (1.1-4: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) +Next token is token number (1) +Error: discarding token number (1) +Error: popping token error () Stack now 0 4 -Shifting token error (1.1-6: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) 
-Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input () +Cleanup: popping nterm input () +input: +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1449: $PREPARSER ./calc input +stdout: +./calc.at:1448: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +stderr: +stderr: +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -154369,244 +149080,311 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1443: cat stderr +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +2.1: syntax error, unexpected '+' +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1448: $PREPARSER ./calc input +stderr: +./calc.at:1445: cat stderr +stderr: +2.1: syntax error, unexpected '+' +stderr: +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: input: +input: +./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | 1//2 | (1 + # + 1) = 1111 -./calc.at:1443: $PREPARSER ./calc input +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: + | 1 2 +./calc.at:1448: $PREPARSER ./calc input +input: +stderr: +stderr: +./calc.at:1453: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 4 12 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Error: popping nterm exp (1) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) +Next token is token '+' () +Error: discarding token '+' () +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-8: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token number (1) +Error: discarding token number (1) +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 
1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of input () +Cleanup: popping nterm input () + | 1 2 +./calc.at:1449: $PREPARSER ./calc input +stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1453: $PREPARSER ./calc /dev/null +1.3: syntax error +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.1: syntax error, unexpected end of input +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error +1.3: syntax error stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 4 12 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Error: popping nterm exp (1) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) +Next token is token '+' () 
+Error: discarding token '+' () +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-8: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token number (1) +Error: discarding token number (1) +Error: popping token error () Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: +1.1: syntax error, unexpected end of input +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -154616,296 +149394,417 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1443: cat stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1454: cat stderr +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1448: cat stderr +./calc.at:1449: cat stderr +input: +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | error +./calc.at:1453: cat stderr +./calc.at:1454: $PREPARSER ./calc input +input: +input: + | 1//2 +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1445: cat stderr + | 1//2 +./calc.at:1448: $PREPARSER ./calc input +stderr: +1.1: syntax error, unexpected invalid token +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.1: syntax error, unexpected invalid token +1.3: syntax error +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1453: $PREPARSER ./calc input input: | (1 + 1) / (1 - 1) -./calc.at:1443: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc input +stderr: +1.3: syntax error +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.1-46: error: 4444 != 1 +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: +stderr: +1.3: syntax error Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 4 12 20 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 Stack now 0 4 12 20 29 Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) Entering state 12 Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 Stack now 0 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) +Next token is token '/' () +Shifting token '/' () Entering state 22 Stack now 0 8 22 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 22 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 22 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 8 22 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Stack now 0 8 22 4 12 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 22 4 12 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 28 Stack now 0 8 22 4 12 19 28 Reading a token -Next token is token ')' (1.17: ) +Next token is token ')' () Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) 
Entering state 12 Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 Stack now 0 8 22 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) Entering state 31 Stack now 0 8 22 31 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' () Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of input () +Cleanup: popping nterm input () +1.3: syntax error +./calc.at:1454: cat stderr +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 = 2 = 3 stderr: +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Stack now 0 4 12 20 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 Stack now 0 4 12 20 29 Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) Entering state 12 Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 Stack now 0 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) +Next token is token '/' () +Shifting token '/' () Entering state 22 Stack now 0 8 22 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 8 22 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 22 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Stack now 0 8 22 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Stack now 0 8 22 4 12 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 22 4 12 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 28 Stack now 0 8 22 4 12 19 28 Reading a token -Next token is token ')' (1.17: ) +Next token is token ')' () Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) Entering state 12 Stack 
now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 Stack now 0 8 22 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) Entering state 31 Stack now 0 8 22 31 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' () Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1443: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: +1.7: syntax error, unexpected '=' +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -154915,17 +149814,147 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1443: cat stderr -532. calc.at:1443: ok - -541. calc.at:1457: testing Calculator C++ parse.error=custom ... -./calc.at:1457: mv calc.y.tmp calc.y +stderr: +./calc.at:1453: cat stderr +1.7: syntax error, unexpected '=' +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1448: cat stderr +./calc.at:1449: cat stderr +input: + | (!!) + (1 2) = 1 +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1445: cat stderr +input: + | error +./calc.at:1449: $PREPARSER ./calc input +stderr: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +533. 
calc.at:1445: ok +input: + | error +stderr: +./calc.at:1448: $PREPARSER ./calc input +./calc.at:1454: cat stderr +1.1: syntax error +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +input: +1.1: syntax error +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1454: $PREPARSER ./calc input +stderr: +1.1: syntax error +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +2.1: syntax error, unexpected '+' +stderr: -./calc.at:1457: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1457: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +1.1: syntax error +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1453: cat stderr +2.1: syntax error, unexpected '+' +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1454: cat stderr +./calc.at:1448: cat stderr +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (- *) + (1 2) = 1 +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1454: $PREPARSER ./calc /dev/null +stderr: stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error, unexpected end of file +./calc.at:1449: cat stderr +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | 1 = 2 = 3 +./calc.at:1448: $PREPARSER ./calc input +stderr: +input: +1.1: syntax error, unexpected end of file +stderr: + | 1 = 2 = 3 +stderr: +./calc.at:1449: $PREPARSER ./calc input +1.7: syntax error +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 stdout: -./calc.at:1448: "$PERL" -ne ' +stderr: +1.7: syntax error +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -154934,9 +149963,22 @@ || /\s$/ # No tabs. 
|| /\t/ - )' calc.cc calc.hh + )' calc.cc +stderr: +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: input: +1.7: syntax error | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -154950,19 +149992,31 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1448: $PREPARSER ./calc input -stderr: -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1457: $PREPARSER ./calc input stderr: -./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1454: cat stderr +1.7: syntax error +551. calc.at:1478: testing Calculator glr2.cc %locations api.location.type={Span} ... input: - | 1 2 -./calc.at:1448: $PREPARSER ./calc input -stderr: -1.3: syntax error -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: mv calc.y.tmp calc.y + +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: -1.3: syntax error +./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -154973,15 +150027,90 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +input: ./calc.at:1448: cat stderr + | (* *) + (*) + (*) +./calc.at:1453: $PREPARSER ./calc input +stderr: +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1//2 +stderr: + | 1 2 +./calc.at:1457: $PREPARSER ./calc input +input: + | + | +1 ./calc.at:1448: $PREPARSER ./calc input +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1449: cat stderr stderr: -1.3: syntax error +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +2.1: syntax error ./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: cat stderr +stderr: +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +2.1: syntax error +input: +./calc.at:1453: cat stderr + | (!!) + (1 2) = 1 +./calc.at:1454: $PREPARSER ./calc input +input: + | + | +1 +./calc.at:1449: $PREPARSER ./calc input stderr: -1.3: syntax error ./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -154992,9 +150121,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +2.1: syntax error +stderr: +input: +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 + | 1 + 2 * 3 + !+ ++ +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./calc.at:1449: "$PERL" -ne ' +./calc.at:1451: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. 
@@ -155005,7 +150143,13 @@ || /\t/ )' calc.cc calc.hh +stderr: +stderr: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1448: cat stderr input: +./calc.at:1457: cat stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -155019,20 +150163,16 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1448: cat stderr -input: - | error +2.1: syntax error +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1448: $PREPARSER ./calc /dev/null stderr: -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stdout: stderr: +input: 1.1: syntax error -./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: ./calc.at:1446: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -155044,9 +150184,28 @@ || /\t/ )' calc.cc calc.hh + | 1//2 +./calc.at:1457: $PREPARSER ./calc input +stderr: +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: stderr: -input: 1.1: syntax error +stderr: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -155060,13 +150219,1125 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1451: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1446: $PREPARSER ./calc input +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1454: cat stderr input: +stderr: +./calc.at:1453: $EGREP -c -v 'Return for a new token:|LAC:' stderr | 1 2 -./calc.at:1449: $PREPARSER ./calc input +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1449: cat stderr +./calc.at:1478: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1449: $PREPARSER ./calc /dev/null stderr: 1.3: syntax error +./calc.at:1457: cat stderr +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error ./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1448: cat stderr +stderr: +1.3: syntax error + | 1 + 2 * 3 + !- ++ +./calc.at:1453: $PREPARSER ./calc input +stderr: +1.1: syntax error +stderr: +input: + | error +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1457: $PREPARSER ./calc input +stderr: +input: + | (- *) + (1 2) = 1 +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token 
number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Stack now 0 6 8 20 29 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 20 29 21 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 20 29 21 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 20 29 21 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Stack now 0 6 8 20 29 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Stack now 0 6 8 20 29 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm 
exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Stack now 0 6 2 10 23 +Reading a token +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Stack now 0 6 2 10 23 32 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) 
+Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.4: 
1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 19 1 
+Reducing stack by rule 5 (line 79): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 19 4 +Reading a token +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) +Entering state 1 +Stack now 0 6 8 19 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 19 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 +Stack now 0 6 8 19 4 12 19 +Reading a token +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 19 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Stack now 0 6 8 19 4 12 19 28 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 19 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Stack now 0 6 8 19 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (10.13: ) 
+Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Stack now 0 6 8 23 32 23 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 23 32 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Stack now 0 6 8 23 32 23 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Stack now 0 6 8 23 32 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) 
+Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Stack now 0 6 4 12 23 +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Stack now 0 6 4 12 23 32 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line 
(13.1-14.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +input: +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: cat stderr +./calc.at:1451: cat stderr stderr: Starting parse Entering state 0 @@ -156085,7 +152356,346 @@ Stack now 0 6 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1446: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1453: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1448: $PREPARSER ./calc input +input: + | 1//2 +./calc.at:1451: $PREPARSER ./calc input +stderr: +input: + | 1 2 +./calc.at:1446: $PREPARSER ./calc input +input: +input: +./calc.at:1457: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1449: $PREPARSER ./calc input +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 + | (#) + (#) = 2222 +stderr: +./calc.at:1453: $PREPARSER ./calc input +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +stderr: +1.3: syntax error +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 + | 1 = 2 = 3 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1457: $PREPARSER ./calc input +1.3: syntax error +stderr: +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1454: cat stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1449: cat stderr +./calc.at:1453: cat stderr +./calc.at:1446: cat stderr +input: +./calc.at:1457: cat stderr +input: + | (* *) + (*) + (*) +input: +input: +./calc.at:1448: cat stderr +./calc.at:1454: $PREPARSER ./calc input + | 1//2 +./calc.at:1446: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +./calc.at:1449: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1451: cat stderr +./calc.at:1453: $PREPARSER ./calc input +stderr: +input: +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +stderr: +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1457: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +1.11: syntax error +1.1-16: error: 2222 != 1 +stderr: +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +input: +stderr: ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +1.6: syntax error: invalid character: '#' +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 + | error +./calc.at:1448: $PREPARSER ./calc input +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+./calc.at:1451: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +stderr: +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +stderr: +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +stderr: +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +1.1: syntax error +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1457: cat stderr +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.1: syntax error +./calc.at:1454: cat stderr +./calc.at:1446: cat stderr +./calc.at:1453: cat stderr +./calc.at:1457: $PREPARSER ./calc /dev/null +stderr: ./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -156096,7 +152706,1164 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stdout: +./calc.at:1443: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc calc.hh + +input: +input: +stderr: +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1449: cat stderr +input: +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1454: $PREPARSER ./calc input + | error +./calc.at:1446: $PREPARSER ./calc input +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1448: cat stderr +stderr: +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1453: $PREPARSER ./calc input +stderr: +./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +./calc.at:1451: cat stderr + | (- *) + (1 2) = 1 +./calc.at:1448: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: + | 1 + 2 * 3 + !- ++ +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1457: cat stderr +stderr: +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1446: cat stderr +stderr: stderr: +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) 
+Entering state 21 +Stack now 0 6 8 20 29 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 20 29 21 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 20 29 21 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 20 29 21 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Stack now 0 6 8 20 29 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Stack now 0 6 8 20 29 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Stack now 0 6 2 10 23 +Reading a token +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 23 1 
+Reducing stack by rule 5 (line 79): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Stack now 0 6 2 10 23 32 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token 
'=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) 
+Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) 
+Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 19 4 +Reading a token +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) +Entering state 1 +Stack now 0 6 8 19 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 19 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 +Stack now 0 6 8 19 4 12 19 +Reading a token +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 19 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Stack now 0 6 8 19 4 12 19 28 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 19 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Stack now 0 6 8 19 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 
70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Stack now 0 6 8 23 32 23 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 23 32 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Stack now 0 6 8 23 32 23 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Stack now 0 6 8 23 32 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Stack now 0 6 4 12 23 +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Stack now 0 6 4 12 23 32 +Reading a token +Next token is token ')' 
(13.5: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + | 1 = 2 = 3 +./calc.at:1451: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1457: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -156740,7 +154507,6 @@ Reading a token Next token is token '\n' (9.15-10.0: ) Reducing stack by rule 11 (line 102): -1.3: syntax error $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) @@ -157116,10 +154882,20 @@ Stack now 0 6 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1446: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1448: cat stderr +stderr: input: -./calc.at:1449: "$PERL" -pi -e 'use strict; + | 1 = 2 = 3 +./calc.at:1446: $PREPARSER ./calc input +stderr: +stderr: +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] 
(expected: [number] ['-'] ['('] ['!']) +error: 4444 != 1 +./calc.at:1443: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.7: syntax error +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -157129,11 +154905,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 2 -./calc.at:1446: $PREPARSER ./calc input -input: - | 1 = 2 = 3 -./calc.at:1448: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -157149,43 +154920,34 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -stderr: -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.7: syntax error -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.7: syntax error -./calc.at:1449: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -input: - | 1//2 -./calc.at:1449: $PREPARSER ./calc input +./calc.at:1449: cat stderr +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -157196,25 +154958,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.3: syntax error -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.3: syntax error -./calc.at:1448: cat stderr -./calc.at:1446: cat stderr -input: -./calc.at:1449: "$PERL" -pi -e 'use strict; +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -157224,17 +154969,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | - | +1 -./calc.at:1448: $PREPARSER ./calc input - | 1//2 -./calc.at:1446: $PREPARSER ./calc input -stderr: -2.1: syntax error -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stderr: +./calc.at:1454: cat stderr Starting parse Entering state 0 Stack now 0 @@ -157249,23 +154985,55 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -2.1: syntax error -./calc.at:1449: cat stderr -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +error: 4444 != 1 +1.7: syntax error +input: + | (* *) + (*) + (*) +./calc.at:1449: $PREPARSER ./calc input +input: +./calc.at:1453: cat stderr + | 1 2 +./calc.at:1443: $PREPARSER ./calc input +input: + | (#) + (#) = 2222 +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1448: cat stderr +stderr: +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error Starting parse Entering state 0 Stack now 0 @@ -157280,57 +155048,15 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | error -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.1: syntax error ./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: cat stderr -./calc.at:1446: cat stderr -stderr: -1.1: syntax error -./calc.at:1448: $PREPARSER ./calc /dev/null input: - | error -stderr: -1.1: syntax error -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: "$PERL" -pi -e 'use strict; +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -157341,36 +155067,8 @@ }eg ' expout || exit 77 stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -1.1: syntax error -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: cat stderr ./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -157381,31 +155079,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1449: cat stderr -./calc.at:1448: cat stderr -input: -./calc.at:1446: cat stderr - | 1 = 2 = 3 -./calc.at:1449: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1453: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -1.7: syntax error -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1448: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1446: $PREPARSER ./calc input stderr: stderr: -1.7: syntax error -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 +1.6: syntax error: invalid character: '#' stderr: -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -157420,79 +155107,21 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1451: cat stderr 1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 +1.10: syntax error +1.16: syntax error + | (!!) 
+ (1 2) = 1 +./calc.at:1457: $PREPARSER ./calc input +./calc.at:1446: cat stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1449: "$PERL" -pi -e 'use strict; +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -157502,7 +155131,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1448: "$PERL" -pi -e 'use strict; +stderr: +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +1.6: syntax error: invalid character: '#' +input: +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -157512,7 +155146,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1446: "$PERL" -pi -e 'use strict; + | + | +1 +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -157522,26 +155158,41 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1449: cat stderr -./calc.at:1448: cat stderr -./calc.at:1446: cat stderr -input: - | - | +1 -./calc.at:1449: $PREPARSER ./calc input +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: $PREPARSER ./calc input input: +./calc.at:1443: cat stderr +stderr: + | (* *) + (*) + (*) +./calc.at:1454: cat stderr +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +./calc.at:1448: $PREPARSER ./calc input stderr: +input: +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1449: cat stderr | | +1 2.1: syntax error -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: $PREPARSER ./calc input +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +./calc.at:1446: $PREPARSER ./calc input + | 1//2 +stderr: +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1453: cat stderr stderr: - | (!!) 
+ (1 2) = 1 stderr: -2.1: syntax error -./calc.at:1448: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -157567,20 +155218,43 @@ Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +2.1: syntax error +./calc.at:1457: cat stderr stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 +input: +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +input: + | (1 + #) = 1111 +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1453: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -157607,10 +155281,70 @@ Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +1.2: syntax error +1.10: syntax error +1.16: syntax error ./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1449: $PREPARSER ./calc input +stderr: +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +./calc.at:1457: $PREPARSER ./calc input +stderr: +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +stderr: +1.6: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +1.11-17: error: null divisor +./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -157621,7 +155355,33 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1449: cat stderr +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +1.6: syntax error: invalid character: '#' +input: +1.11-17: error: null divisor +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 + | 1 + 2 * 3 + !- ++ +./calc.at:1446: cat stderr +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1451: cat stderr ./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -157632,15 +155392,36 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1449: $PREPARSER ./calc /dev/null -./calc.at:1446: cat stderr -stderr: ./calc.at:1446: $PREPARSER ./calc /dev/null -1.1: syntax error +stderr: +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1443: cat stderr +./calc.at:1451: $PREPARSER ./calc /dev/null +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -1.1: syntax error +stderr: +./calc.at:1448: cat stderr +./calc.at:1453: cat stderr Starting parse Entering state 0 Stack now 0 @@ -157649,9 +155430,41 @@ 1.1: syntax error, unexpected end of input Cleanup: discarding lookahead token end of input (1.1: ) Stack now 0 +./calc.at:1454: cat stderr +1.1: syntax error +./calc.at:1457: cat stderr +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: cat stderr stderr: +input: +538. calc.at:1453: stdout: + ok +stderr: + | 1 + 2 * 3 + !+ ++ +stderr: +./calc.at:1458: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. 
== 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1448: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +1.1: syntax error +input: +input: ./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -157662,15 +155475,59 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: + | error + | (# + 1) = 1111 +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +stderr: +./calc.at:1458: $PREPARSER ./calc input + | (* *) + (*) + (*) +./calc.at:1457: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 -./calc.at:1446: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +stderr: +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: cat stderr +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -157680,40 +155537,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (- *) + (1 2) = 1 -./calc.at:1448: $PREPARSER ./calc input -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' stderr: +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ./calc.at:1449: cat stderr -./calc.at:1446: cat stderr -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -input: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1449: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1446: $PREPARSER ./calc input stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 
+Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1448: "$PERL" -pi -e 'use strict; +stderr: +input: +input: +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1448: $PREPARSER ./calc input +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -157723,7 +155575,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1449: "$PERL" -pi -e 'use strict; +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1446: $PREPARSER ./calc input +./calc.at:1451: cat stderr +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -157734,6 +155590,39 @@ }eg ' expout || exit 77 stderr: + | (#) + (#) = 2222 +./calc.at:1449: $PREPARSER ./calc input +stderr: +./calc.at:1454: cat stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: + | 1 2 +./calc.at:1443: cat stderr +input: +./calc.at:1458: $PREPARSER ./calc input +./calc.at:1455: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +stderr: +stderr: +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1451: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -158051,9 +155940,36 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1448: cat stderr +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +stderr: +stderr: +input: ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1454: $PREPARSER ./calc input +input: stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1455: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -158284,3397 +156200,103 @@ Stack now 0 8 20 4 12 Next token is token '*' (1.39: ) Shifting token '*' (1.39: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1449: cat stderr - | (* *) + (*) + (*) -./calc.at:1448: $PREPARSER ./calc input -input: -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (!!) + (1 2) = 1 -./calc.at:1449: $PREPARSER ./calc input -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: cat stderr -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -input: - | (!!) + (1 2) = 1 -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a 
token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 
2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1448: cat stderr -./calc.at:1449: cat stderr -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1446: cat stderr -stderr: -input: - | (- *) + (1 2) = 1 -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: $PREPARSER ./calc input -input: - | (- *) + (1 2) = 1 -./calc.at:1446: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.4: syntax error -1.12: syntax 
error -1.1-17: error: 2222 != 1 - | 1 + 2 * 3 + !- ++ -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next 
token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1449: cat stderr -./calc.at:1446: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1448: cat stderr -./calc.at:1449: $PREPARSER ./calc input -input: - | (* *) + (*) + (*) -stderr: -./calc.at:1446: $PREPARSER ./calc input -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error - | (#) + (#) = 2222 -./calc.at:1448: $PREPARSER ./calc input -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1449: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1448: cat stderr -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1446: cat stderr -./calc.at:1448: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.6: syntax error: invalid character: '#' -input: -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1446: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1449: $PREPARSER ./calc input -stderr: -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1448: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1446: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -input: - | (# + 1) = 1111 - | 1 + 2 * 3 + !- ++ -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -1.2: syntax error: invalid character: '#' -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: cat stderr -stderr: -1.2: syntax error: invalid character: '#' -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: - | (#) + (#) = 2222 -./calc.at:1449: $PREPARSER ./calc input -stderr: -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./calc.at:1454: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -stderr: -./calc.at:1446: cat stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1448: cat stderr -input: -input: -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1454: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1446: $PREPARSER ./calc input -input: - | (1 + # + 1) = 1111 -./calc.at:1448: $PREPARSER ./calc input -stderr: -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) 
-Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1449: cat stderr -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.6: syntax error: invalid character: '#' -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing 
stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | (1 + #) = 1111 -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -1.6: syntax error: invalid character: '#' -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -./calc.at:1454: $PREPARSER ./calc input -stderr: -stderr: -1.3: syntax error, unexpected number -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1448: cat stderr -stderr: -1.3: syntax error, unexpected number -stderr: -./calc.at:1446: cat stderr -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stdout: -./calc.at:1453: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - - | (1 + 1) / (1 - 1) -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1448: $PREPARSER ./calc input -stderr: -./calc.at:1454: cat stderr -input: -input: -./calc.at:1449: cat stderr -1.11-17: error: null divisor -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 - | (1 + #) = 1111 -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1446: $PREPARSER ./calc input -input: -stderr: -stderr: -1.11-17: error: null divisor - | 1//2 -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: $PREPARSER ./calc input -input: -stderr: - | (# + 1) = 1111 -./calc.at:1449: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1453: $EGREP 
-c -v 'Return for a new token:|LAC:' stderr -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -stderr: -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -1.2: syntax error: invalid character: '#' -input: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
- | 1 2 -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1448: cat stderr -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.3: syntax error, unexpected number -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1446: cat stderr -1.3: syntax error, unexpected number -535. calc.at:1448: ok -./calc.at:1449: cat stderr -./calc.at:1454: cat stderr -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (# + 1) = 1111 -./calc.at:1446: $PREPARSER ./calc input -input: -input: -./calc.at:1453: cat stderr - - | (1 + # + 1) = 1111 -./calc.at:1449: $PREPARSER ./calc input - | error -./calc.at:1454: $PREPARSER ./calc input -stderr: -stderr: -1.6: syntax error: invalid character: '#' -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm 
exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: - | 1//2 -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error, unexpected invalid token -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -stderr: -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - 
$1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.1: syntax error, unexpected invalid token -stderr: -1.6: syntax error: invalid character: '#' -stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1446: cat stderr -./calc.at:1454: cat stderr -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1453: cat stderr -input: -input: - | (1 + # + 1) = 1111 -./calc.at:1446: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1454: $PREPARSER ./calc input -input: - | error -stderr: -./calc.at:1453: $PREPARSER ./calc input -1.7: syntax error, unexpected '=' -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1449: 
cat stderr -stderr: -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error, unexpected invalid token -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.7: syntax error, unexpected '=' -stderr: -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.1: syntax error, unexpected invalid token - | (1 + 1) / (1 - 1) -./calc.at:1449: $PREPARSER ./calc input -542. 
calc.at:1458: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1458: mv calc.y.tmp calc.y - -stderr: -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1454: cat stderr -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.11-17: error: null divisor -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -input: -stderr: -./calc.at:1453: cat stderr -./calc.at:1446: cat stderr -1.11-17: error: null divisor - | - | +1 -./calc.at:1454: $PREPARSER ./calc input -input: -input: -stderr: -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -./calc.at:1446: $PREPARSER ./calc input -2.1: syntax error, unexpected '+' -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 -./calc.at:1453: $PREPARSER ./calc input -stderr: -stderr: -2.1: syntax error, unexpected '+' -1.7: syntax error, unexpected '=' -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1449: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ 
= nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.7: syntax error, unexpected '=' -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 
0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -536. calc.at:1449: ok -./calc.at:1454: cat stderr -./calc.at:1453: cat stderr -./calc.at:1454: $PREPARSER ./calc /dev/null -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -1.1: syntax error, unexpected end of file -./calc.at:1446: cat stderr -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - - | - | +1 -./calc.at:1453: $PREPARSER ./calc input -stderr: -1.1: syntax error, unexpected end of file -stderr: -2.1: syntax error, unexpected '+' -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -534. calc.at:1446: ok -stderr: -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -2.1: syntax error, unexpected '+' -./calc.at:1454: cat stderr -stderr: -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -input: -stdout: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1454: $PREPARSER ./calc input -./calc.at:1457: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -stderr: -./calc.at:1453: cat stderr -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1453: $PREPARSER ./calc /dev/null -./calc.at:1458: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -stderr: -./calc.at:1457: $PREPARSER ./calc input -1.1: syntax error, unexpected end of input -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -stderr: -stderr: -1.1: syntax error, unexpected end of input -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 2 -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1454: cat stderr -./calc.at:1453: cat stderr -stderr: -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1453: $PREPARSER ./calc input -input: -stderr: - | (!!) + (1 2) = 1 -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1454: $PREPARSER ./calc input -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token '*' (1.41: ) 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) 1.1-46: error: 4444 != 1 -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -543. calc.at:1459: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} parse.lac=full ... -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error 1.1-46: error: 4444 != 1 -./calc.at:1459: mv calc.y.tmp calc.y - -stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1457: cat stderr -./calc.at:1459: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -input: - | 1//2 -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1457: $PREPARSER ./calc input -stderr: -./calc.at:1454: cat stderr -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1453: cat stderr -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) - | (- *) + (1 2) = 1 -./calc.at:1454: $PREPARSER ./calc input -stderr: -input: -544. calc.at:1468: testing Calculator glr.cc ... -./calc.at:1468: mv calc.y.tmp calc.y - -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1468: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: cat stderr -./calc.at:1454: "$PERL" -pi -e 'use strict; +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -161684,17 +156306,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: input: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 - | error -./calc.at:1457: $PREPARSER ./calc input -stderr: -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: cat stderr -./calc.at:1453: "$PERL" -pi -e 'use strict; +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -161704,40 +156317,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -input: -./calc.at:1453: cat stderr - | (* *) + (*) + (*) -./calc.at:1454: $PREPARSER ./calc input -input: -stderr: - | (- *) + (1 2) = 1 -./calc.at:1453: $PREPARSER ./calc input -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1457: cat stderr -stderr: -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 | 1 = 2 = 3 -./calc.at:1457: $PREPARSER ./calc input -stderr: -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1453: "$PERL" -pi -e 'use strict; +./calc.at:1443: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: cat stderr +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -161747,171 +156332,108 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1459: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1453: cat stderr -./calc.at:1454: cat stderr ./calc.at:1457: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1453: $PREPARSER ./calc input -input: -stderr: -input: - | - | +1 -./calc.at:1457: $PREPARSER ./calc input -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1454: $PREPARSER ./calc input -stderr: -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -stderr: -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 stderr: stderr: -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1448: cat stderr +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./calc.at:1449: cat stderr +stdout: input: -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !- ++ -./calc.at:1454: $PREPARSER ./calc input -./calc.at:1457: cat stderr -./calc.at:1453: cat stderr -./calc.at:1457: $PREPARSER ./calc /dev/null -stderr: -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: +./calc.at:1446: cat stderr | 1 + 2 * 3 + !+ ++ -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1454: cat stderr -./calc.at:1457: cat stderr -stderr: +./calc.at:1457: $PREPARSER ./calc input +./types.at:139: $PREPARSER ./test input: -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 + | 1//2 +./calc.at:1458: $PREPARSER ./calc input | (#) + (#) = 2222 -./calc.at:1457: $PREPARSER ./calc input -stderr: -./calc.at:1454: $PREPARSER ./calc input -./calc.at:1468: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1453: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -error: 4444 != 1 -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -error: 4444 != 1 +./calc.at:1448: $PREPARSER ./calc input input: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | 1 + 2 * 3 + !- ++ -./calc.at:1453: $PREPARSER ./calc input -stderr: -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1457: cat stderr -./calc.at:1454: cat stderr -input: -input: - | (!!) + (1 2) = 1 -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1457: $PREPARSER ./calc input | (1 + #) = 1111 -./calc.at:1454: $PREPARSER ./calc input -stderr: -stderr: -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -./calc.at:1453: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.6: syntax error: invalid character: '#' -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -input: - | (#) + (#) = 2222 -./calc.at:1453: $PREPARSER ./calc input ./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -161922,21 +156444,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: cat stderr -stderr: -./calc.at:1457: cat stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: -input: - | (# + 1) = 1111 - | (- *) + (1 2) = 1 -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1453: "$PERL" -pi -e 'use strict; +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -161946,47 +156454,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1454: $PREPARSER ./calc input -stderr: -stderr: -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -1.2: syntax error: invalid character: '#' -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1453: cat stderr -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -stderr: +./calc.at:1449: $PREPARSER ./calc input input: -1.2: syntax error: invalid character: '#' - | (1 + #) = 1111 -./calc.at:1453: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1457: cat stderr stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1454: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1457: $PREPARSER ./calc input -input: -./calc.at:1453: "$PERL" -pi -e 'use strict; +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) 
+ (1 2) = 1 +./calc.at:1446: $PREPARSER ./calc input +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -161997,306 +156471,171 @@ }eg ' expout || exit 77 stderr: - | (1 + # + 1) = 1111 -./calc.at:1454: $PREPARSER ./calc input -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1453: cat stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) stderr: -input: 1.6: syntax error: invalid character: '#' - | (# + 1) = 1111 -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1457: cat stderr -stderr: 1.2: syntax error: invalid character: '#' -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: +1.8: syntax error: invalid character: '#' +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1454: cat stderr -1.2: syntax error: invalid character: '#' - | 1 + 2 * 3 + !+ ++ -./calc.at:1457: $PREPARSER ./calc input -input: - | (1 + 1) / (1 - 1) -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1454: $PREPARSER ./calc input -stderr: +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1453: cat stderr -1.11-17: error: null divisor -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -1.11-17: error: null divisor -stderr: - | (1 + # + 1) = 1111 -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: 1.6: syntax error: invalid character: '#' -./calc.at:1454: cat stderr -input: - | 1 + 2 * 3 + !- ++ -539. 
calc.at:1454: ok -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1453: cat stderr -stderr: - -input: - | (1 + 1) / (1 - 1) -./calc.at:1453: $PREPARSER ./calc input -stderr: -1.11-17: error: null divisor -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: cat stderr -stderr: -1.11-17: error: null divisor -input: - | (#) + (#) = 2222 -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1453: cat stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -538. calc.at:1453: ok -./calc.at:1457: cat stderr - -input: - | (1 + #) = 1111 -./calc.at:1457: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1457: cat stderr -545. calc.at:1469: testing Calculator glr2.cc ... -./calc.at:1469: mv calc.y.tmp calc.y - -./calc.at:1469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -input: - | (# + 1) = 1111 -./calc.at:1457: $PREPARSER ./calc input -stderr: -stderr: -syntax error: invalid character: '#' -stdout: -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -stderr: -syntax error: invalid character: '#' -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1451: $PREPARSER ./calc input -546. calc.at:1476: testing Calculator C++ %glr-parser ... 
-./calc.at:1476: mv calc.y.tmp calc.y - -stderr: -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: cat stderr -./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: -./calc.at:1451: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1457: $PREPARSER ./calc input -input: -stderr: - | 1 2 -syntax error: invalid character: '#' -./calc.at:1451: $PREPARSER ./calc input -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error: invalid character: '#' -1.3: syntax error -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.3: syntax error -./calc.at:1457: cat stderr -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 input: +./calc.at:1443: cat stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) | (1 + 1) / (1 - 1) -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1451: cat stderr -stderr: -error: null divisor -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: null divisor -input: - | 1//2 -./calc.at:1451: $PREPARSER ./calc input -stderr: -1.3: syntax error -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for 
summaries/d' stderr -stderr: -1.3: syntax error -./calc.at:1457: cat stderr -541. calc.at:1457: ok -./calc.at:1469: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1451: cat stderr - -input: -stderr: - | error -./calc.at:1451: $PREPARSER ./calc input -stdout: -./calc.at:1455: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -stderr: -./calc.at:1476: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -1.1: syntax error -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1455: $PREPARSER ./calc input +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1451: cat stderr +452. types.at:139: ok +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: Starting parse Entering state 0 Stack now 0 @@ -163381,11 +157720,181 @@ Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 -./calc.at:1451: $PREPARSER ./calc input +./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: stderr: -1.7: syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.11-17: error: null divisor +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1451: cat stderr +552. calc.at:1479: testing Calculator C++ %glr-parser %header parse.error=verbose %name-prefix "calc" %verbose ... +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +input: +input: +./calc.at:1479: mv calc.y.tmp calc.y + + | 1 + 2 * 3 + !- ++ Starting parse Entering state 0 Stack now 0 @@ -164470,16 +158979,8 @@ Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) ./calc.at:1455: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.7: syntax error -input: -547. calc.at:1476: testing Calculator glr2.cc ... - | 1 2 -./calc.at:1476: mv calc.y.tmp calc.y - -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1451: "$PERL" -pi -e 'use strict; +./calc.at:1457: $PREPARSER ./calc input +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -164489,8 +158990,57 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y + | + | +1 +stderr: +./calc.at:1443: $PREPARSER ./calc input +stderr: +input: +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: cat stderr +input: +./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1448: cat stderr +stderr: + | (!!) + (1 2) = 1 +1.11-17: error: null divisor + +./calc.at:1449: cat stderr + | 1 2 +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1455: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +stderr: stderr: +stderr: +input: +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -164521,7 +159071,24 @@ Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +input: +input: +./calc.at:1446: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1448: $PREPARSER ./calc input stderr: +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -164551,8 +159118,187 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -./calc.at:1451: cat stderr +./calc.at:1458: cat stderr +./calc.at:1457: cat stderr +stderr: +stderr: +./calc.at:1454: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 
+Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 input: +1.6: syntax error: invalid character: '#' ./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -164563,18 +159309,245 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | - | +1 -./calc.at:1451: $PREPARSER ./calc input +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1449: $PREPARSER ./calc input stderr: -./calc.at:1455: cat stderr -2.1: syntax error -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error +1.1-16: error: 2222 != 1 input: +539. calc.at:1454: ok + | (#) + (#) = 2222 +stderr: + | error +./calc.at:1458: $PREPARSER ./calc input +./calc.at:1457: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +stderr: +stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +stderr: +./calc.at:1455: cat stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1457: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: stderr: +1.2: syntax error: invalid character: '#' +syntax error: invalid character: '#' +syntax error: invalid character: '#' +input: + +./calc.at:1443: cat stderr | 1//2 ./calc.at:1455: $PREPARSER ./calc input -2.1: syntax error +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1443: $PREPARSER ./calc /dev/null +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1448: cat stderr stderr: Starting parse Entering state 0 @@ -164619,21 +159592,25 @@ Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +./calc.at:1446: cat stderr +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: cat stderr +stderr: +stderr: +./calc.at:1457: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token Next token is token number (1.1: 1) Shifting token number (1.1: 1) Entering state 1 @@ -164672,29 +159649,27 @@ Stack now 0 Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 +./calc.at:1449: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +input: +input: + | (* *) + (*) + (*) + | (# + 1) = 1111 +./calc.at:1446: $PREPARSER ./calc input +./calc.at:1448: $PREPARSER ./calc input ./calc.at:1451: cat stderr -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1451: $PREPARSER ./calc /dev/null -./calc.at:1455: cat stderr input: - | error -./calc.at:1455: $PREPARSER ./calc input -stderr: -1.1: syntax error -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error -stderr: -./calc.at:1451: "$PERL" -pi -e 'use strict; +1.2: syntax error: invalid character: '#' +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -164704,55 +159679,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking 
lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1451: cat stderr input: ./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -164764,78 +159690,183 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1451: $PREPARSER ./calc input stderr: -./calc.at:1455: cat stderr -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | (1 + #) = 1111 +./calc.at:1457: $PREPARSER ./calc input input: - | 1 = 2 = 3 -./calc.at:1455: $PREPARSER ./calc input -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '=' (1.7: ) -LAC: checking lookahead '=': Err -LAC: checking lookahead end of file: R6 G8 Err -LAC: checking lookahead number: R6 G8 Err -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -LAC: checking lookahead NEG: R6 G8 Err -LAC: checking lookahead '^': S23 -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected 
'*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: "$PERL" -pi -e 'use strict; +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +syntax error: invalid character: '#' + | (1 + # + 1) = 1111 +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: cat stderr +./calc.at:1455: cat stderr +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +input: +stderr: +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 + | (- *) + (1 2) = 1 +./calc.at:1458: $PREPARSER 
./calc input +input: +stderr: +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -164845,256 +159876,183 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +syntax error: invalid character: '#' + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1443: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '=' (1.7: ) -LAC: checking lookahead '=': Err -LAC: checking lookahead end of file: R6 G8 Err -LAC: checking lookahead number: R6 G8 Err -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -LAC: checking lookahead NEG: R6 G8 Err -LAC: checking lookahead '^': S23 -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1451: cat stderr -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1455: cat stderr - | (!!) + (1 2) = 1 -./calc.at:1451: $PREPARSER ./calc input -input: -stderr: - | - | +1 -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 87): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '+' (2.1: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': Err -LAC: checking lookahead end of file: S16 -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 87): - $1 = token '\n' (1.1-2.0: ) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 82): +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '+' (2.1: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': Err -LAC: checking lookahead end of file: S16 -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1451: cat stderr -./calc.at:1455: cat stderr -./calc.at:1455: $PREPARSER ./calc /dev/null -input: - | (- *) + (1 2) = 1 -./calc.at:1451: $PREPARSER ./calc input +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of file (1.1: ) -LAC: initial context established for end of file -LAC: checking lookahead end of file: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.6: syntax error: invalid character: '#' +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +stderr: +input: +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +553. calc.at:1479: testing Calculator glr2.cc %header parse.error=verbose %name-prefix "calc" %verbose ... 1.4: syntax error 1.12: syntax error 1.1-17: error: 2222 != 1 -stderr: ./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of file (1.1: ) -LAC: initial context established for end of file -LAC: checking lookahead end of file: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 + | error +./calc.at:1455: $PREPARSER ./calc input +stderr: +./calc.at:1479: mv calc.y.tmp calc.y + stderr: 1.4: syntax error 1.12: syntax error 1.1-17: error: 2222 != 1 -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1451: "$PERL" -pi -e 'use strict; +./calc.at:1457: cat stderr +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -165104,24 +160062,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1455: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1451: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1451: $PREPARSER ./calc input -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error Starting parse Entering state 0 Stack now 0 @@ -165132,23 +160074,7 @@ Stack now 0 4 Reading a token Next token is token ')' (1.2: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -LAC: initial context discarded due to error recovery Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 @@ -165156,7 +160082,7 @@ Shifting token ')' (1.2: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -165178,7 +160104,7 @@ Shifting token number (1.7: 1) Entering state 1 Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.7: 1) -> $$ = nterm exp (1.7: 1) Entering state 12 @@ -165193,16 +160119,14 @@ Shifting token number (1.11: 1) Entering state 1 Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.11: 1) -> $$ = nterm exp (1.11: 1) Entering state 29 Stack now 0 8 20 4 12 20 29 Reading a token Next token is token '+' (1.13: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G12 S20 -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -165211,7 +160135,6 @@ Stack now 0 8 20 4 12 Next token is token '+' (1.13: ) Shifting token '+' (1.13: ) -LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 4 12 20 Reading a token @@ -165219,16 +160142,14 @@ Shifting token number (1.15: 1) Entering state 1 Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.15: 1) -> $$ = nterm exp (1.15: 1) Entering state 29 Stack now 0 8 20 4 12 20 29 Reading a token Next token is token '+' (1.17: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G12 S20 -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -165237,32 
+160158,15 @@ Stack now 0 8 20 4 12 Next token is token '+' (1.17: ) Shifting token '+' (1.17: ) -LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 4 12 20 Reading a token Next token is token ')' (1.18: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' Error: popping token '+' (1.17: ) Stack now 0 8 20 4 12 Error: popping nterm exp (1.7-15: 3) Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery Shifting token error (1.7-18: ) Entering state 11 Stack now 0 8 20 4 11 @@ -165270,7 +160174,7 @@ Shifting token ')' (1.18: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): +Reducing stack by rule 14 (line 105): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -165279,9 +160183,7 @@ Stack now 0 8 20 29 Reading a token Next token is token '+' (1.20: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G8 S20 -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -165290,7 +160192,6 @@ Stack now 0 8 Next token is token '+' (1.20: ) Shifting token '+' (1.20: ) -LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 Reading a token @@ -165300,55 +160201,30 @@ Stack now 0 8 20 4 Reading a token Next token is token '*' (1.23: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery Shifting token error (1.23: ) Entering state 11 Stack now 0 8 20 4 11 Next token is token '*' (1.23: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err Error: discarding token '*' (1.23: ) Error: popping token error (1.23: ) Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery Shifting token error (1.23: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token Next token is token '*' (1.25: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err Error: discarding token '*' (1.25: ) Error: popping token error (1.23: ) Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery Shifting token error (1.23-25: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token Next token is token '*' (1.27: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err Error: discarding token '*' (1.27: ) Error: popping token error (1.23-25: ) Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery Shifting token error (1.23-27: ) Entering state 11 Stack now 0 8 20 4 11 @@ -165357,7 +160233,7 @@ Shifting token ')' (1.28: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): +Reducing stack by rule 14 (line 105): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -165366,9 +160242,7 @@ Stack now 0 8 20 29 Reading a token Next token is token '+' (1.30: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G8 S20 -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -165377,7 +160251,6 @@ Stack now 0 8 Next token is token '+' (1.30: ) Shifting token '+' (1.30: ) -LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 Reading a token @@ -165390,7 +160263,7 @@ Shifting token number (1.33: 1) Entering state 1 Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.33: 1) -> $$ = nterm exp (1.33: 1) Entering state 12 @@ -165405,16 +160278,14 @@ Shifting token number (1.37: 2) Entering state 1 Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.37: 2) -> $$ = nterm exp (1.37: 2) Entering state 30 Stack now 0 8 20 4 12 21 30 Reading a token Next token is token '*' (1.39: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': R9 G12 S21 -Reducing stack by rule 9 (line 105): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -165423,42 +160294,22 @@ Stack now 0 8 20 4 12 Next token is token '*' (1.39: ) Shifting token '*' (1.39: ) -LAC: initial context discarded due to shift Entering state 21 Stack now 0 8 20 4 12 21 Reading a token Next token is token '*' (1.41: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 1.41: syntax error, unexpected '*', expecting 
number or '-' or '(' or '!' Error: popping token '*' (1.39: ) Stack now 0 8 20 4 12 Error: popping nterm exp (1.33-37: 2) Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 20 4 11 Next token is token '*' (1.41: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err Error: discarding token '*' (1.41: ) Error: popping token error (1.33-41: ) Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 20 4 11 @@ -165467,7 +160318,7 @@ Shifting token ')' (1.42: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): +Reducing stack by rule 14 (line 105): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -165476,9 +160327,7 @@ Stack now 0 8 20 29 Reading a token Next token is token '=' (1.44: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -165487,7 +160336,6 @@ Stack now 0 8 Next token is token '=' (1.44: ) Shifting token '=' (1.44: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token @@ -165495,16 +160343,14 @@ Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.46: 1) -> $$ = nterm exp (1.46: 1) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.47-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-42: 4444) $2 = token '=' (1.44: ) $3 = nterm exp (1.46: 1) @@ -165514,29 +160360,69 @@ Stack now 0 8 Next token is token '\n' (1.47-2.0: ) Shifting token '\n' (1.47-2.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-46: 4444) $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 82): +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1448: cat stderr +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err 
+LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +./calc.at:1446: cat stderr ./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -165547,7 +160433,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +stdout: Starting parse Entering state 0 Stack now 0 @@ -165558,23 +160444,7 @@ Stack now 0 4 Reading a token Next token is token ')' (1.2: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 @@ -165582,7 +160452,7 @@ Shifting token ')' (1.2: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -165604,7 +160474,7 @@ Shifting token number (1.7: 1) Entering state 1 Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.7: 1) -> $$ = nterm exp (1.7: 1) Entering state 12 @@ -165619,16 +160489,14 @@ Shifting token number (1.11: 1) Entering state 1 Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.11: 1) -> $$ = nterm exp (1.11: 1) Entering state 29 Stack now 0 8 20 4 12 20 29 Reading a token Next token is token '+' (1.13: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G12 S20 -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -165637,7 +160505,6 @@ Stack now 0 8 20 4 12 Next token is token '+' (1.13: ) Shifting token '+' (1.13: ) -LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 4 12 20 Reading a token @@ -165645,16 +160512,14 @@ Shifting token number (1.15: 1) Entering state 1 Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.15: 1) -> $$ = nterm exp (1.15: 1) Entering state 29 Stack now 0 8 20 4 12 20 29 Reading a token Next token is token '+' (1.17: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G12 S20 -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -165663,32 +160528,15 @@ Stack now 0 8 20 4 12 Next token is token '+' (1.17: ) Shifting token '+' (1.17: ) -LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 4 12 20 Reading a token Next token is token ')' (1.18: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
Error: popping token '+' (1.17: ) Stack now 0 8 20 4 12 Error: popping nterm exp (1.7-15: 3) Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery Shifting token error (1.7-18: ) Entering state 11 Stack now 0 8 20 4 11 @@ -165696,7 +160544,7 @@ Shifting token ')' (1.18: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): +Reducing stack by rule 14 (line 105): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -165705,9 +160553,7 @@ Stack now 0 8 20 29 Reading a token Next token is token '+' (1.20: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G8 S20 -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -165716,7 +160562,6 @@ Stack now 0 8 Next token is token '+' (1.20: ) Shifting token '+' (1.20: ) -LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 Reading a token @@ -165726,55 +160571,30 @@ Stack now 0 8 20 4 Reading a token Next token is token '*' (1.23: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -LAC: initial context discarded due to error recovery Shifting token error (1.23: ) Entering state 11 Stack now 0 8 20 4 11 Next token is token '*' (1.23: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err Error: discarding token '*' (1.23: ) Error: popping token error (1.23: ) Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery Shifting token error (1.23: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token Next token is token '*' (1.25: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err Error: discarding token '*' (1.25: ) Error: popping token error (1.23: ) Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery Shifting token error (1.23-25: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token Next token is token '*' (1.27: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err Error: discarding token '*' (1.27: ) Error: popping token error (1.23-25: ) Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery Shifting token error (1.23-27: ) Entering state 11 Stack now 0 8 20 4 11 @@ -165783,7 +160603,7 @@ Shifting token ')' (1.28: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): +Reducing stack by rule 14 (line 105): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -165792,9 +160612,7 @@ Stack now 0 8 20 29 Reading a token Next token is token '+' (1.30: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G8 S20 -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -165803,7 +160621,6 @@ Stack now 0 8 Next token is token '+' (1.30: ) Shifting token '+' (1.30: ) -LAC: 
initial context discarded due to shift Entering state 20 Stack now 0 8 20 Reading a token @@ -165816,7 +160633,7 @@ Shifting token number (1.33: 1) Entering state 1 Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.33: 1) -> $$ = nterm exp (1.33: 1) Entering state 12 @@ -165831,16 +160648,14 @@ Shifting token number (1.37: 2) Entering state 1 Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.37: 2) -> $$ = nterm exp (1.37: 2) Entering state 30 Stack now 0 8 20 4 12 21 30 Reading a token Next token is token '*' (1.39: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': R9 G12 S21 -Reducing stack by rule 9 (line 105): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -165849,42 +160664,22 @@ Stack now 0 8 20 4 12 Next token is token '*' (1.39: ) Shifting token '*' (1.39: ) -LAC: initial context discarded due to shift Entering state 21 Stack now 0 8 20 4 12 21 Reading a token Next token is token '*' (1.41: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Error: popping token '*' (1.39: ) Stack now 0 8 20 4 12 Error: popping nterm exp (1.33-37: 2) Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 20 4 11 Next token is token '*' (1.41: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err Error: discarding token '*' (1.41: ) Error: popping token error (1.33-41: ) Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 20 4 11 @@ -165893,7 +160688,7 @@ Shifting token ')' (1.42: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): +Reducing stack by rule 14 (line 105): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -165902,9 +160697,7 @@ Stack now 0 8 20 29 Reading a token Next token is token '=' (1.44: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -165913,7 +160706,6 @@ Stack now 0 8 Next token is token '=' (1.44: ) Shifting token '=' (1.44: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token @@ -165921,16 +160713,14 @@ Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.46: 1) -> $$ = nterm exp (1.46: 1) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.47-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack 
by rule 6 (line 80): $1 = nterm exp (1.1-42: 4444) $2 = token '=' (1.44: ) $3 = nterm exp (1.46: 1) @@ -165940,1175 +160730,50 @@ Stack now 0 8 Next token is token '\n' (1.47-2.0: ) Shifting token '\n' (1.47-2.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-46: 4444) $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1451: cat stderr -./calc.at:1455: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1451: $PREPARSER ./calc input -stderr: -input: -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -./calc.at:1455: $PREPARSER ./calc input -stderr: -./calc.at:1451: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 120): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.11: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: Err -LAC: checking lookahead '=': S18 -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.14: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -LAC: initial context discarded due to shift -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering 
state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !- ++ -stderr: -./calc.at:1451: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 120): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.11: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: Err -LAC: checking lookahead '=': S18 -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.14: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) 
-Shifting token '=' (1.14: ) -LAC: initial context discarded due to shift -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1455: cat stderr -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (- *) + (1 2) = 1 -./calc.at:1455: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 119): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: Err -LAC: checking lookahead '=': S18 -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.15: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -LAC: initial context discarded due to shift -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -LAC: initial context established for '\n' -LAC: checking 
lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 119): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: Err -LAC: checking lookahead '=': S18 -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.15: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -LAC: initial context discarded due to shift -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -LAC: initial context established for '\n' -LAC: checking 
lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + | (# + 1) = 1111 +./calc.at:1457: $PREPARSER ./calc input input: - | (#) + (#) = 2222 -./calc.at:1451: $PREPARSER ./calc input -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: cat stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: - | (* *) + (*) + (*) -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1455: $PREPARSER ./calc input -stderr: -./calc.at:1451: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.10: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.13: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G8 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R7 G8 S24 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1458: cat stderr stderr: -input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.10: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.13: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R7 G8 S20 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err +Next token is token invalid token (1.1: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err LAC: checking lookahead end of file: Err LAC: checking lookahead number: S1 LAC: checking lookahead '=': Err @@ -167118,286 +160783,72 @@ LAC: checking lookahead '/': Err LAC: checking lookahead NEG: Err LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err +LAC: checking lookahead '\n': S3 LAC: checking lookahead '(': S4 LAC: checking lookahead ')': Err LAC: checking lookahead '!': S5 -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-LAC: initial context discarded due to error recovery -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -LAC: initial context established for '*' -LAC: checking lookahead '*': Err -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R7 G8 S24 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | (1 + #) = 1111 -./calc.at:1451: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1455: cat stderr -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1451: cat stderr -stderr: -Starting parse -Entering state 0 +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R9 G29 R7 G8 S20 -Reducing stack by rule 9 (line 105): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 121): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1449: cat stderr input: - | (# + 1) = 1111 -./calc.at:1451: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R9 G29 R7 G8 S20 -Reducing stack by rule 9 (line 105): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -LAC: initial context discarded due to shift -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 121): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1455: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1448: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1446: $PREPARSER ./calc input +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1468: $PREPARSER ./calc input +stderr: + | + | +1 +./calc.at:1458: $PREPARSER ./calc input +554. calc.at:1480: testing Calculator C++ %glr-parser parse.error=verbose api.prefix={calc} %verbose ... stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1443: cat stderr input: -1.2: syntax error: invalid character: '#' - | 1 + 2 * 3 + !- ++ -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1451: "$PERL" -pi -e 'use strict; +./calc.at:1480: mv calc.y.tmp calc.y + + | (1 + 1) / (1 - 1) +stderr: +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1451: cat stderr +1.6: syntax error: invalid character: '#' +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -167407,7 +160858,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1479: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +stderr: +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.11-17: error: null divisor +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -167416,7 +160874,7 @@ Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 @@ -167431,7 +160889,7 @@ Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 @@ -167446,16 +160904,14 @@ Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token Next token is token '+' (1.11: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R9 G29 R7 G8 S20 -Reducing stack by rule 9 (line 105): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -167463,7 +160919,7 @@ Entering state 29 Stack now 0 8 20 29 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -167472,7 +160928,6 @@ Stack now 0 8 Next token is token '+' 
(1.11: ) Shifting token '+' (1.11: ) -LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 Reading a token @@ -167481,17 +160936,60 @@ Entering state 5 Stack now 0 8 20 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 122): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $2 = token '+' (1.14: ) Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: + | (* *) + (*) + (*) +1.11-17: error: null divisor +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1458: cat stderr + | 1 2 +./calc.at:1455: cat stderr +input: +./calc.at:1468: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: $PREPARSER ./calc /dev/null +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1457: cat stderr +stderr: +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: stderr: Starting parse Entering state 0 @@ -167501,7 +160999,7 @@ Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 @@ -167516,7 +161014,7 @@ Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 @@ -167531,16 +161029,14 @@ Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token Next token is token '+' (1.11: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': R9 G29 R7 G8 S20 -Reducing stack by rule 9 (line 105): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -167548,7 +161044,7 @@ Entering state 29 Stack now 0 8 20 29 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -167557,7 +161053,6 @@ Stack now 0 8 Next token is token '+' (1.11: ) Shifting token '+' (1.11: ) -LAC: initial context discarded due to shift Entering state 20 Stack now 0 8 20 Reading a token @@ -167566,39 +161061,34 @@ Entering state 5 Stack now 0 8 20 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 122): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $2 = token '+' (1.14: ) Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1451: cat stderr -input: -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + # + 1) = 1111 -./calc.at:1451: $PREPARSER ./calc input +./calc.at:1446: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: -./calc.at:1455: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +syntax error +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: cat stderr stderr: input: -1.6: syntax error: invalid character: '#' - | (#) + (#) = 2222 -./calc.at:1455: $PREPARSER ./calc input +stderr: +syntax error + | 1 + 2 * 3 + !- ++ +./calc.at:1446: $PREPARSER ./calc input +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: cat stderr +input: ./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -167609,7 +161099,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (1 + 1) / (1 - 1) +./calc.at:1448: $PREPARSER ./calc input +input: stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -167619,765 +161113,297 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-2: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Stack now 0 8 20 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-8: ) +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 25 Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.11: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -LAC: initial context discarded due to shift +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 
Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.17-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 -> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 Next token is token '\n' (1.17-2.0: ) Shifting token '\n' (1.17-2.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-16: 2222) $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 82): +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) 
-Stack now 0 8 20 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.11: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R7 G8 S18 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -LAC: initial context discarded due to shift -Entering state 18 -Stack now 0 8 18 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Stack now 0 8 18 27 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '\n' (1.17-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1451: cat stderr -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (1 + 1) / (1 - 1) -./calc.at:1451: $PREPARSER ./calc input -./calc.at:1455: cat stderr +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + | (1 + # + 1) = 1111 + | 1 = 2 = 3 +./calc.at:1457: $PREPARSER ./calc input stderr: -1.11-17: error: null divisor -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: +536. calc.at:1449: stderr: +stdout: +./calc.at:1476: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + 1.11-17: error: null divisor - | (1 + #) = 1111 -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting 
token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: cat stderr +stderr: +syntax error: invalid character: '#' +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 92): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 92): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 
1111) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -537. calc.at:1451: ok -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1455: cat stderr -input: - - | (# + 1) = 1111 -./calc.at:1455: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': Err -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 +Next token is token '=' (1.7: ) +LAC: checking lookahead '=': Err +LAC: checking lookahead end of file: R6 G8 Err +LAC: checking lookahead number: R6 G8 Err +LAC: checking lookahead '=': Err +LAC: 
checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +LAC: checking lookahead NEG: R6 G8 Err +LAC: checking lookahead '^': S23 +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': Err -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.1-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) 
-Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1455: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1455: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 @@ -168388,3040 +161414,139 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': Err -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' (1.2: ) +Shifting token '!' 
(1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token number (1.10: 1) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-10: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 25 Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): +Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 -Stack now 0 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid 
character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 4 12 Reading a token -Next token is token '+' (1.8: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': Err -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-8: ) +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -LAC: initial context established for number -LAC: checking lookahead number: Err -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -LAC: initial context discarded due to error recovery -Shifting token error (1.2-10: ) +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 118): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token 
\[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1455: cat stderr -548. calc.at:1477: testing Calculator C++ %glr-parser %locations ... -./calc.at:1477: mv calc.y.tmp calc.y - -input: -./calc.at:1477: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y - | (1 + 1) / (1 - 1) -./calc.at:1455: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' (1.7: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R7 G12 S26 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -LAC: initial context discarded due to shift -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 117): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R8 G12 S26 -Reducing stack by rule 8 (line 104): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -LAC: initial context discarded due to shift -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 117): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' 
(1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R10 G8 S24 -Reducing stack by rule 10 (line 106): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -LAC: initial context discarded due to shift -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 29 -Stack now 0 4 12 20 29 +Stack now 0 8 20 29 Reading a token -Next token is token ')' (1.7: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R7 G12 S26 -Reducing stack by rule 7 (line 103): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -LAC: initial context discarded due to shift -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 117): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 -Reading a token 
-Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Stack now 0 8 18 Reading a token Next token is token number (1.16: 1) Shifting token number (1.16: 1) Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 92): +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): $1 = token number (1.16: 1) -> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token ')' (1.17: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R8 G12 S26 -Reducing stack by rule 8 (line 104): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -LAC: initial context discarded due to shift -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 117): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R10 G8 S24 -Reducing stack by rule 10 (line 106): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -LAC: initial context discarded due to shift +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 88): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 82): +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of file (2.1: ) -Shifting token end of file (2.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1455: cat stderr -540. calc.at:1455: ok - -549. calc.at:1477: testing Calculator glr2.cc %locations ... 
-./calc.at:1477: mv calc.y.tmp calc.y - -./calc.at:1477: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1477: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1477: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -stderr: -stdout: -./calc.at:1459: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1459: $PREPARSER ./calc input -stderr: -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | 1 2 -./calc.at:1459: $PREPARSER ./calc input -stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1459: cat stderr -input: - | 1//2 -./calc.at:1459: $PREPARSER ./calc input -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1459: cat stderr -input: - | error -./calc.at:1459: $PREPARSER ./calc input -stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1459: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1459: $PREPARSER ./calc input -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1459: cat stderr -input: - | - | +1 -./calc.at:1459: $PREPARSER ./calc input -stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -stdout: -./calc.at:1458: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -./calc.at:1459: cat stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1459: $PREPARSER ./calc /dev/null -./calc.at:1458: $PREPARSER ./calc input -stderr: -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -input: -./calc.at:1459: cat stderr - | 1 2 -./calc.at:1458: $PREPARSER ./calc input -stderr: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1459: $PREPARSER ./calc input -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1458: cat stderr -./calc.at:1459: cat stderr -input: - | 1//2 -./calc.at:1458: $PREPARSER ./calc input -stderr: -input: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) - | (!!) 
+ (1 2) = 1 -./calc.at:1459: $PREPARSER ./calc input -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1458: cat stderr -./calc.at:1459: cat stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1459: $PREPARSER ./calc input -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -input: - | error -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: $PREPARSER ./calc input -stderr: -stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1459: cat stderr -./calc.at:1458: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1459: $PREPARSER ./calc input -input: -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) - | 1 = 2 = 3 -./calc.at:1458: $PREPARSER ./calc input -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1459: cat stderr -./calc.at:1458: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1459: $PREPARSER ./calc input -input: - | - | +1 -./calc.at:1458: $PREPARSER ./calc input -stderr: -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -stderr: -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -input: -./calc.at:1458: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1459: $PREPARSER ./calc input -stderr: -./calc.at:1458: $PREPARSER ./calc /dev/null -stderr: -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 
-stderr: -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1459: cat stderr -./calc.at:1458: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1459: $PREPARSER ./calc input -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1458: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1459: cat stderr -./calc.at:1458: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1459: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -input: - | (!!) + (1 2) = 1 -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: $PREPARSER ./calc input -stderr: -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1459: cat stderr -./calc.at:1458: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1459: $PREPARSER ./calc input -input: - | (- *) + (1 2) = 1 -./calc.at:1458: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -1.2: syntax error: invalid character: '#' -./calc.at:1459: cat stderr -./calc.at:1458: cat stderr -input: -input: - | (* *) + (*) + (*) -./calc.at:1458: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -./calc.at:1459: $PREPARSER ./calc input -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch 
for summaries/d' stderr -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1458: cat stderr -./calc.at:1459: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1458: $PREPARSER ./calc input -input: -stderr: - | (1 + 1) / (1 - 1) -./calc.at:1459: $PREPARSER ./calc input -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.11-17: error: null divisor -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1458: $PREPARSER ./calc input -stderr: -1.11-17: error: null divisor -stderr: -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: cat stderr -stderr: -543. calc.at:1459: ok -./calc.at:1458: cat stderr - -input: - | (#) + (#) = 2222 -./calc.at:1458: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1458: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1458: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1458: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1458: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -550. calc.at:1478: testing Calculator C++ %glr-parser %locations api.location.type={Span} ... -stderr: -./calc.at:1478: mv calc.y.tmp calc.y - -1.2: syntax error: invalid character: '#' -./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1458: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1458: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1458: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1458: $PREPARSER ./calc input -stderr: -1.11-17: error: null divisor -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11-17: error: null divisor -./calc.at:1458: cat stderr -542. calc.at:1458: ok - -./calc.at:1478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -551. calc.at:1478: testing Calculator glr2.cc %locations api.location.type={Span} ... -./calc.at:1478: mv calc.y.tmp calc.y - -./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1478: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -stderr: -stdout: -./calc.at:1468: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1468: $PREPARSER ./calc input -stderr: -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: - | 1 2 -./calc.at:1468: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stdout: -./calc.at:1476: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1468: cat stderr -./calc.at:1476: $PREPARSER ./calc input -stderr: -input: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1468: $PREPARSER ./calc input -stderr: -input: -stderr: - | 1 2 -./calc.at:1476: $PREPARSER ./calc input -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -stderr: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -stderr: -syntax error -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -./calc.at:1468: cat stderr -input: -input: - | 1//2 - | error -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1476: $PREPARSER ./calc input -stderr: -syntax error -stderr: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -stderr: -syntax error -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: cat stderr -./calc.at:1476: cat stderr -input: -input: - | 1 = 2 = 3 - | error -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1476: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -stderr: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -stderr: -syntax error -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -./calc.at:1468: cat stderr -input: - | 1 = 2 = 3 -input: -./calc.at:1476: $PREPARSER ./calc input - | - | +1 -./calc.at:1468: $PREPARSER ./calc input -stderr: -stderr: -syntax error -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error -syntax error -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -./calc.at:1468: cat stderr -input: - | - | +1 -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1468: $PREPARSER ./calc /dev/null -stderr: -syntax error -stderr: -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -stderr: -syntax error -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1476: cat stderr -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: $PREPARSER ./calc /dev/null -stderr: -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: cat stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1476: cat stderr - | (!!) + (1 2) = 1 -./calc.at:1468: $PREPARSER ./calc input -stderr: -syntax error -error: 2222 != 1 -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1476: $PREPARSER ./calc input -stderr: -syntax error -error: 2222 != 1 -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: cat stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1476: cat stderr -./calc.at:1468: $PREPARSER ./calc input -stderr: -syntax error -syntax error -error: 2222 != 1 -input: -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -./calc.at:1476: $PREPARSER ./calc input -stderr: -syntax error -syntax error -error: 2222 != 1 -stderr: -syntax error -error: 2222 != 1 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -error: 2222 != 1 -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: cat stderr -./calc.at:1476: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1468: $PREPARSER ./calc input -input: - | (- *) + (1 2) = 1 -./calc.at:1476: $PREPARSER ./calc input -stderr: -syntax error -syntax error -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -syntax error -syntax error -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -./calc.at:1468: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1476: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !+ ++ -stderr: -./calc.at:1468: $PREPARSER ./calc input -syntax error -syntax error -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error -syntax error -stderr: -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: cat stderr -stderr: -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1476: $PREPARSER ./calc input -stderr: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1476: $PREPARSER ./calc input -stderr: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1468: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1468: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1476: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1468: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1476: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1476: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error: invalid character: '#' -./calc.at:1468: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error: invalid character: '#' -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1476: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1476: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error: invalid character: '#' -./calc.at:1468: cat stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (1 + # + 1) = 1111 -./calc.at:1468: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1476: cat stderr -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -input: - | (1 + # + 1) = 1111 -./calc.at:1476: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error: invalid character: '#' -./calc.at:1468: cat stderr -stderr: -stdout: -./calc.at:1477: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -input: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -./calc.at:1468: $PREPARSER ./calc input -stderr: -error: null divisor -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1476: cat stderr -input: -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./calc.at:1476: $PREPARSER ./calc input -stderr: -stderr: -error: null divisor -error: null divisor -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: null divisor -stderr: -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 2 -./calc.at:1477: $PREPARSER ./calc input -stderr: -1.3: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: cat stderr -stderr: -1.3: syntax error -./calc.at:1476: cat stderr -544. calc.at:1468: ok -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -546. calc.at:1476: ok - -./calc.at:1477: cat stderr -input: - | 1//2 -./calc.at:1477: $PREPARSER ./calc input - -stderr: -1.3: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.3: syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -input: - | error -./calc.at:1477: $PREPARSER ./calc input -stderr: -1.1: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1477: $PREPARSER ./calc input -stderr: -1.7: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.7: syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -552. calc.at:1479: testing Calculator C++ %glr-parser %header parse.error=verbose %name-prefix "calc" %verbose ... -./calc.at:1479: mv calc.y.tmp calc.y - -./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1477: cat stderr -553. calc.at:1479: testing Calculator glr2.cc %header parse.error=verbose %name-prefix "calc" %verbose ... -./calc.at:1479: mv calc.y.tmp calc.y - -input: - | - | +1 -./calc.at:1477: $PREPARSER ./calc input -stderr: -./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -2.1: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -2.1: syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -./calc.at:1477: $PREPARSER ./calc /dev/null -stderr: -1.1: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1477: $PREPARSER ./calc input -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -input: -stderr: -stdout: - | (!!) + (1 2) = 1 -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1469: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1469: $PREPARSER ./calc input -stderr: -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -./calc.at:1477: cat stderr - | 1 2 -./calc.at:1469: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1477: $PREPARSER ./calc input -stderr: -syntax error -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1479: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1479: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1469: cat stderr -./calc.at:1477: $PREPARSER ./calc input -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -1.2: syntax error -1.10: syntax error -1.16: syntax error - | 1//2 -./calc.at:1469: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error -./calc.at:1477: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1469: cat stderr -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1477: $PREPARSER ./calc input -input: - | error -stderr: -./calc.at:1469: $PREPARSER ./calc input -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (#) + (#) = 2222 -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1469: cat stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1 = 2 = 3 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1469: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1477: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1469: cat stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | - | +1 -./calc.at:1469: $PREPARSER ./calc input -stderr: -stdout: -stderr: -./calc.at:1476: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: cat stderr -input: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -syntax error -./calc.at:1476: $PREPARSER ./calc input -stderr: -input: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -./calc.at:1477: $PREPARSER ./calc input -stderr: -stderr: -input: -1.2: syntax error: invalid character: '#' -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -./calc.at:1476: $PREPARSER ./calc input -stderr: -stderr: -1.2: syntax error: invalid character: '#' -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1469: cat stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1469: $PREPARSER ./calc /dev/null -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -stderr: -syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1476: cat stderr -stderr: -stderr: -syntax error -1.6: syntax error: invalid character: '#' -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -1.6: syntax error: invalid character: '#' - | 1//2 -./calc.at:1476: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1469: cat stderr -./calc.at:1477: cat stderr -input: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - | (1 + 1) / (1 - 1) -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1469: $PREPARSER ./calc input -stderr: -stderr: -1.11-17: error: null divisor -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.11-17: error: null divisor -./calc.at:1476: cat stderr -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -input: -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | error -./calc.at:1476: $PREPARSER ./calc input -stderr: -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: cat stderr -stderr: -syntax error -548. calc.at:1477: ./calc.at:1469: cat stderr - ok -input: - | (!!) + (1 2) = 1 -./calc.at:1469: $PREPARSER ./calc input -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -stderr: -syntax error -error: 2222 != 1 -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -error: 2222 != 1 -./calc.at:1476: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: cat stderr -stderr: -syntax error -input: - | (- *) + (1 2) = 1 -./calc.at:1469: $PREPARSER ./calc input -stderr: -syntax error -syntax error -error: 2222 != 1 -stderr: -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stdout: -./calc.at:1478: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1476: cat stderr -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -input: - | - | +1 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1476: $PREPARSER ./calc input -stderr: -stderr: -syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: cat stderr -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (* *) + (*) + (*) -./calc.at:1469: $PREPARSER ./calc input -stderr: -syntax error -554. calc.at:1480: testing Calculator C++ %glr-parser parse.error=verbose api.prefix={calc} %verbose ... -stderr: -./calc.at:1480: mv calc.y.tmp calc.y - -stderr: -input: -./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -syntax error -syntax error -syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -./calc.at:1478: $PREPARSER ./calc input -stderr: -1.3: syntax error -stderr: -syntax error -syntax error -syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.3: syntax error -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -./calc.at:1476: $PREPARSER ./calc /dev/null -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: cat stderr -stderr: -syntax error -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1478: cat stderr -./calc.at:1469: $PREPARSER ./calc input -stderr: -input: -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1478: $PREPARSER ./calc input -stderr: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.3: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1469: $PREPARSER ./calc input -stderr: -1.3: syntax error -stderr: -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: cat stderr -stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1476: $PREPARSER ./calc input -stderr: -./calc.at:1478: cat stderr -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 - | error -./calc.at:1478: $PREPARSER ./calc input -stderr: -1.1: syntax error -./calc.at:1469: cat stderr -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.1: syntax error -input: - | (#) + (#) = 2222 -./calc.at:1469: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1480: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -input: - | (!!) + (1 2) = 1 -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1478: cat stderr -stderr: -syntax error -error: 2222 != 1 -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -error: 2222 != 1 -input: - | 1 = 2 = 3 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1469: cat stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.7: syntax error -input: - | (1 + #) = 1111 -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.7: syntax error -./calc.at:1476: cat stderr -stderr: -syntax error: invalid character: '#' -input: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (- *) + (1 2) = 1 -./calc.at:1476: $PREPARSER ./calc input -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: cat stderr -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error -syntax error -error: 2222 != 1 -input: - | - | +1 -./calc.at:1478: $PREPARSER ./calc input -stderr: -./calc.at:1469: cat stderr -2.1: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -2.1: syntax error -input: - | (# + 1) = 1111 -./calc.at:1469: $PREPARSER ./calc input -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error: invalid character: '#' -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: cat stderr -stderr: -syntax error: invalid character: '#' -input: - | (* *) + (*) + (*) -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1478: cat stderr -stderr: -syntax error -syntax error -syntax error -./calc.at:1478: $PREPARSER ./calc /dev/null -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.1: syntax error -stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error -syntax error -stderr: -1.1: syntax error -./calc.at:1469: cat stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (1 + # + 1) = 1111 -./calc.at:1469: $PREPARSER ./calc input -./calc.at:1478: cat stderr -./calc.at:1476: cat stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -./calc.at:1478: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -syntax error: invalid character: '#' -./calc.at:1476: $PREPARSER ./calc input -stderr: -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1469: cat stderr -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -./calc.at:1478: cat stderr - | (1 + 1) / (1 - 1) -./calc.at:1469: $PREPARSER ./calc input -input: -stderr: -error: null divisor - | (!!) + (1 2) = 1 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -error: null divisor -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1476: cat stderr -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (#) + (#) = 2222 -./calc.at:1476: $PREPARSER ./calc input -stderr: -./calc.at:1469: cat stderr -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: cat stderr -stderr: -545. calc.at:1469: syntax error: invalid character: '#' -syntax error: invalid character: '#' - ok -input: - | (- *) + (1 2) = 1 -./calc.at:1478: $PREPARSER ./calc input -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1478: cat stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (* *) + (*) + (*) -./calc.at:1478: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -./calc.at:1478: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1476: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !+ ++ -stderr: -./calc.at:1478: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error: invalid character: '#' -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -555. calc.at:1480: testing Calculator glr2.cc parse.error=verbose api.prefix={calc} %verbose ... 
-input: -./calc.at:1480: mv calc.y.tmp calc.y - - | 1 + 2 * 3 + !- ++ -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: -./calc.at:1476: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1478: "$PERL" -pi -e 'use strict; +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171431,56 +161556,108 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +1.11-17: error: null divisor syntax error: invalid character: '#' -./calc.at:1478: cat stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (#) + (#) = 2222 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1476: cat stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | (1 + 1) / (1 - 1) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp 
(1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1451: cat stderr ./calc.at:1476: $PREPARSER ./calc input stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -error: null divisor ./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: null divisor -./calc.at:1478: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1476: "$PERL" -pi -e 'use strict; +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171490,13 +161667,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1476: cat stderr -547. calc.at:1476: ./calc.at:1478: "$PERL" -pi -e 'use strict; +./calc.at:1468: cat stderr + +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171506,19 +161679,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - ok -./calc.at:1480: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1478: cat stderr - -input: - | (# + 1) = 1111 -./calc.at:1478: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1478: "$PERL" -pi -e 'use strict; +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171528,97 +161689,102 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1478: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1478: $PREPARSER ./calc input +./calc.at:1458: cat stderr stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -556. calc.at:1482: testing Calculator C++ %glr-parser %debug ... 
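Another step that recurs after nearly every parser run above is sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr. It re-emits the captured stderr file on the test's own standard error while deleting the "Merge mismatch for summaries" lines that gcov-instrumented binaries can print, presumably so coverage-instrumentation noise never appears among the logged diagnostics. A small sketch follows; the captured file contents (including the .gcda path) are invented, the sed command is the one from the log.

cat > stderr <<'EOF'
profiling:/build/calc-calc.gcda:Merge mismatch for summaries
1.3: syntax error
EOF
# Same filter as in the log; only the sample file above is invented.
sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
# Only "1.3: syntax error" reaches standard error.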
-./calc.at:1482: mv calc.y.tmp calc.y - +./calc.at:1457: cat stderr stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr input: - | (1 + 1) / (1 - 1) -./calc.at:1478: $PREPARSER ./calc input -stderr: -1.11-17: error: null divisor -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1443: cat stderr +input: + | 1 2 +./calc.at:1446: cat stderr + | 1//2 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1468: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1458: $PREPARSER ./calc input +input: +input: +./calc.at:1448: cat stderr + | 1 + 2 * 3 + !+ ++ stderr: -1.11-17: error: null divisor -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr -550. calc.at:1478: ok - -./calc.at:1482: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -557. calc.at:1482: testing Calculator glr2.cc %debug ... -./calc.at:1482: mv calc.y.tmp calc.y - -./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1482: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1451: $PREPARSER ./calc input stderr: -stdout: -./calc.at:1477: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - + | (1 + 1) / (1 - 1) +./calc.at:1457: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +LAC: checking lookahead '=': Err +LAC: checking lookahead end of file: R6 G8 Err +LAC: checking lookahead number: R6 G8 Err +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +LAC: checking lookahead NEG: R6 G8 Err +LAC: checking lookahead '^': S23 +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +syntax error +syntax error input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1477: $PREPARSER ./calc input -stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 input: - | 1 2 -./calc.at:1477: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1479: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS + | (#) + (#) = 2222 +./calc.at:1446: $PREPARSER ./calc input +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171628,74 +161794,428 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1477: cat stderr -input: - | 1//2 -./calc.at:1477: $PREPARSER ./calc input stderr: -1.3: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -input: - | error -./calc.at:1477: $PREPARSER ./calc input stderr: -1.1: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 stderr: -1.1: syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1477: $PREPARSER ./calc input stderr: -1.7: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.7: syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr +./calc.at:1451: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1458: cat stderr +error: 
null divisor +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +error: null divisor +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: sed 
>&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: input: | | +1 -./calc.at:1477: $PREPARSER ./calc input -stderr: -2.1: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -2.1: syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end 
of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171705,14 +162225,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1477: cat stderr -./calc.at:1477: $PREPARSER ./calc /dev/null -stderr: -1.1: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; +535. calc.at:1448: ok +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171722,24 +162236,161 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1477: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1455: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 
18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1451: $PREPARSER ./calc input input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1477: $PREPARSER ./calc input -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1477: "$PERL" -pi -e 'use strict; + | (!!) + (1 2) = 1 +./calc.at:1458: $PREPARSER ./calc input +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171749,18 +162400,54 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1477: cat stderr -input: - | (!!) 
+ (1 2) = 1 -./calc.at:1477: $PREPARSER ./calc input +./calc.at:1457: cat stderr stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 87): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err +LAC: checking lookahead end of file: S16 +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11: syntax error +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) 1.1-16: error: 2222 != 1 -./calc.at:1477: "$PERL" -pi -e 'use strict; +./calc.at:1476: cat stderr +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171770,20 +162457,59 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1477: cat stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1477: $PREPARSER ./calc input stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: cat stderr stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1477: "$PERL" -pi -e 'use strict; +./calc.at:1446: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 87): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err +LAC: checking lookahead end of file: S16 +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +input: +input: +./calc.at:1480: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | 1//2 + | error +./calc.at:1468: 
$PREPARSER ./calc input +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: $PREPARSER ./calc input + +541. calc.at:1457: ./calc.at:1443: cat stderr +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171793,20 +162519,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1477: cat stderr + ok input: - | (* *) + (*) + (*) -./calc.at:1477: $PREPARSER ./calc input -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1477: "$PERL" -pi -e 'use strict; +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171816,41 +162531,257 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1477: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1477: $PREPARSER ./calc input stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error stderr: -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1477: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1446: $PREPARSER ./calc input stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: cat stderr stderr: -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr +stderr: +syntax error +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +syntax error +./calc.at:1455: $PREPARSER ./calc /dev/null input: - | (#) + (#) = 2222 -./calc.at:1477: $PREPARSER ./calc input +./calc.at:1458: cat stderr +./calc.at:1451: cat stderr +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +555. calc.at:1480: testing Calculator glr2.cc parse.error=verbose api.prefix={calc} %verbose ... 
+ +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of file (1.1: ) +LAC: initial context established for end of file +LAC: checking lookahead end of file: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 + | (* *) + (*) + (*) stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1477: "$PERL" -pi -e 'use strict; +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack 
now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171860,16 +162791,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1477: cat stderr input: - | (1 + #) = 1111 -./calc.at:1477: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1477: "$PERL" -pi -e 'use strict; + | (- *) + (1 2) = 1 +./calc.at:1458: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of file (1.1: ) +LAC: initial context established for end of file +LAC: checking lookahead end of file: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +./calc.at:1480: mv calc.y.tmp calc.y + + | (#) + (#) = 2222 +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171879,16 +162833,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1477: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1477: $PREPARSER ./calc input +stderr: +./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 stderr: 1.2: syntax error: invalid character: '#' -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.8: syntax error: invalid character: '#' +./calc.at:1468: cat stderr +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1476: cat stderr 1.2: syntax error: invalid character: '#' -./calc.at:1477: "$PERL" -pi -e 'use strict; +1.8: syntax error: invalid character: '#' +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171898,16 +162858,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1477: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1477: $PREPARSER ./calc input stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1477: "$PERL" -pi -e 'use strict; +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token 
\[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171917,16 +162873,168 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1477: cat stderr input: - | (1 + 1) / (1 - 1) -./calc.at:1477: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1468: $PREPARSER ./calc input stderr: -1.11-17: error: null divisor -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +stdout: stderr: -1.11-17: error: null divisor -./calc.at:1477: "$PERL" -pi -e 'use strict; +./calc.at:1446: cat stderr + | error +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171936,17 +163044,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1477: cat stderr -549. calc.at:1477: ok - -558. calc.at:1485: testing Calculator C++ %glr-parser parse.error=detailed %debug %name-prefix "calc" %verbose ... -./calc.at:1485: mv calc.y.tmp calc.y - -./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1485: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +syntax error +./calc.at:1455: cat stderr +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: cat stderr stderr: -stdout: -./calc.at:1478: "$PERL" -ne ' +syntax error +stderr: +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1459: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. 
@@ -171958,32 +163065,177 @@ )' calc.cc input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1478: $PREPARSER ./calc input stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error + | (# + 1) = 1111 +./calc.at:1446: $PREPARSER ./calc input stderr: input: - | 1 2 -./calc.at:1478: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1455: $PREPARSER ./calc input +syntax error +./calc.at:1451: cat stderr +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
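The "$PERL" -ne '...' calc.cc probe seen just above, which the suite runs over each freshly generated parser source, is a whitespace lint: it prints file:line: {text} for a blank first or last line, for any line with trailing whitespace, and for any line containing a tab. Here is the same probe over an invented file named sample.cc; only the sample file is new, the Perl code is the one quoted in the log.

printf 'int main ()\n{\treturn 0;  \n}\n' > sample.cc
perl -ne '
  chomp;
  print "$ARGV:$.: {$_}\n"
    if (# No starting/ending empty lines.
        (eof || $. == 1) && /^\s*$/
        # No trailing space.
        || /\s$/
        # No tabs.
        || /\t/
       )' sample.cc
# Reports line 2, which contains a tab and ends in spaces; a clean file
# produces no output.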
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | (* *) + (*) + (*) +./calc.at:1458: $PREPARSER ./calc input stderr: -1.3: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error -./calc.at:1478: "$PERL" -pi -e 'use strict; +input: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171993,29 +163245,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1478: cat stderr -input: -stderr: -stdout: - | 1//2 -./calc.at:1480: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -./calc.at:1478: $PREPARSER ./calc input -stderr: -1.3: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.3: syntax error -input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -172029,126 +163258,104 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1478: cat stderr -input: -input: - | error -./calc.at:1478: $PREPARSER ./calc input - | 1 2 -./calc.at:1480: $PREPARSER ./calc input -stderr: -1.1: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.1: syntax error -syntax error, unexpected number -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error, unexpected number -./calc.at:1478: cat stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 = 2 = 3 -./calc.at:1480: cat stderr -./calc.at:1478: $PREPARSER ./calc input -stderr: -1.7: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1//2 -./calc.at:1480: $PREPARSER ./calc input -1.7: syntax error -stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1478: cat stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | - | +1 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1480: cat stderr -stderr: -2.1: syntax error -input: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | error -./calc.at:1480: $PREPARSER ./calc input -stderr: -2.1: syntax error -stderr: -syntax error, unexpected invalid token -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error, unexpected invalid token -./calc.at:1478: cat stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; +./calc.at:1459: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -172158,49 +163365,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1478: $PREPARSER ./calc /dev/null -stderr: -./calc.at:1480: cat stderr -1.1: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: + | (1 + #) = 1111 +./calc.at:1451: $PREPARSER ./calc input stderr: -1.1: syntax error - | 1 = 2 = 3 -./calc.at:1480: $PREPARSER ./calc input -stderr: -syntax error, unexpected '=' -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - 
my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '=' -stderr: -stdout: -./calc.at:1482: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -./calc.at:1478: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1480: "$PERL" -pi -e 'use strict; +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -172210,878 +163380,1760 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -stderr: -./calc.at:1482: $PREPARSER ./calc input -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: cat stderr -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -stderr: -input: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 29 +Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '+' (1.13: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G12 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.17: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G12 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G8 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.23: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.25: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.27: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.30: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G8 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) Entering state 21 +Stack now 0 8 20 4 12 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) Entering state 30 +Stack now 0 8 20 4 12 21 30 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) +Next token is token '*' (1.39: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': R9 G12 S21 +Reducing stack by rule 9 (line 105): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +LAC: initial context discarded due to shift +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token 
is token '*' (1.41: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +LAC: initial context discarded due to shift Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (7) -Shifting token "number" (7) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) --> $$ = nterm exp (7) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) +Next token is token '\n' (1.47-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line 
(1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1443: cat stderr +stderr: +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 10 +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) -Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Stack now 0 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (5) -Shifting 
token "number" (5) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) --> $$ = nterm exp (5) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token ')' (1.2: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '+' (1.13: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G12 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.17: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G12 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err 
+LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G8 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 +Next token is token '*' (1.23: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) +Next token is token '*' (1.25: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.27: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.30: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G8 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 92): 
+ $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '*' (1.39: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': R9 G12 S21 +Reducing stack by rule 9 (line 105): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 8 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +LAC: initial context discarded due to shift +Entering state 21 +Stack now 0 8 20 4 12 21 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '*' (1.41: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) +Next token is token '=' (1.44: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +LAC: initial context discarded due to shift Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.46: 1) +Shifting 
token number (1.46: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) +Next token is token '\n' (1.47-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +stderr: + | 1 + 2 * 3 + !+ ++ +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1468: cat stderr +556. calc.at:1482: testing Calculator C++ %glr-parser %debug ... +./calc.at:1482: mv calc.y.tmp calc.y + +1.6: syntax error: invalid character: '#' +./calc.at:1446: cat stderr +./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: cat stderr +./calc.at:1458: cat stderr +stderr: +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1455: cat stderr +input: +input: +input: + | + | +1 +input: +input: + | (1 + # + 1) = 1111 +./calc.at:1446: $PREPARSER ./calc input + | 1 2 +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1459: $PREPARSER ./calc input +557. calc.at:1482: testing Calculator glr2.cc %debug ... +./calc.at:1482: mv calc.y.tmp calc.y + +input: + | 1 = 2 = 3 + | 1 + 2 * 3 + !+ ++ +./calc.at:1458: $PREPARSER ./calc input +./calc.at:1476: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +./calc.at:1455: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing 
stack by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 28 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 28 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = 
token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token "number" (4) -Shifting token "number" (4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) --> $$ = nterm exp (4) -Entering state 10 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1443: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1451: cat stderr +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error +stderr: +syntax error +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 120): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 28 +Next token is token number (1.11: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: Err +LAC: checking lookahead '=': S18 +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) +Next token is token '=' (1.14: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 
-Next token is token '=' () -Shifting token '=' () +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +LAC: initial context discarded due to shift Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) +Next token is token '\n' (1.17-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 8 +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (# + 1) = 1111 +./calc.at:1451: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: 
) +Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (256) -Shifting token "number" (256) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) --> $$ = nterm exp (256) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering 
state 4 +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +input: +stderr: + | 1 + 2 * 3 + !- ++ +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1443: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 12 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 120): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) +Next token is token number (1.11: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: Err +LAC: checking lookahead '=': S18 +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.9-11: ) +Entering 
state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.14: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +LAC: initial context discarded due to shift Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (64) -Shifting token "number" (64) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) --> $$ = nterm exp (64) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) +Next token is token '\n' (1.17-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" () +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1478: "$PERL" -pi -e 'use strict; +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1480: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +1.2: syntax error: invalid character: '#' +./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -173093,905 +165145,792 @@ ' expout || exit 77 stderr: stderr: -syntax error, unexpected '+' +stderr: +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error: invalid character: '#' Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> 
$$ = nterm exp (7) +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (7) -Shifting token "number" (7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) --> $$ = nterm exp (7) -Entering state 27 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1455: cat stderr +./calc.at:1459: cat stderr +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1446: cat stderr +./calc.at:1476: cat stderr +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1458: $PREPARSER ./calc input +stderr: +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1468: cat stderr +input: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (5) -Shifting token "number" (5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) --> $$ = nterm exp (5) -Entering state 10 +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +input: + | (- *) + (1 2) = 1 +./calc.at:1455: $PREPARSER ./calc input + | 1//2 +./calc.at:1459: $PREPARSER ./calc input +./calc.at:1468: $PREPARSER ./calc /dev/null +input: +input: + | (1 + 1) / (1 - 1) + | + | +1 +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1446: $PREPARSER ./calc input +stderr: +stderr: +syntax error +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +stderr: +./calc.at:1451: cat stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 +Next token is token '*' (1.4: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.4: syntax error, 
unexpected '*', expecting number or '-' or '(' or '!' +LAC: initial context discarded due to error recovery +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 119): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 +Stack now 0 8 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 8 +Stack now 0 8 20 4 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is 
token number (1.12: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: Err +LAC: checking lookahead '=': S18 +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) +Next token is token '=' (1.15: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +LAC: initial context discarded due to shift Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) +Next token is token '\n' (1.18-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 
-Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +syntax error +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: cat stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: cat stderr +stderr: +syntax error +stderr: +stderr: +stderr: +stderr: +input: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +syntax error +input: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 +Next token is token '*' (1.4: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 119): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Stack now 0 8 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 28 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token number (1.12: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: Err +LAC: checking lookahead '=': S18 +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +LAC: initial context discarded due to error 
recovery +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 28 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) +Next token is token '=' (1.15: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +LAC: initial context discarded due to shift Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (4) -Shifting token "number" (4) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) --> $$ = nterm exp (4) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next 
token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | (1 + # + 1) = 1111 +./calc.at:1451: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 28 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () 
--> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (256) -Shifting token "number" (256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) --> $$ = nterm exp (256) -Entering state 27 +Stack now 0 8 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 +Stack now 0 8 22 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 +Stack now 0 8 22 4 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm 
exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (64) -Shifting token "number" (64) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) --> $$ = nterm exp (64) -Entering state 27 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1 2 -./calc.at:1482: $PREPARSER ./calc input -syntax error, unexpected '+' -./calc.at:1478: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | (#) + (#) = 2222 +./calc.at:1458: $PREPARSER ./calc input input: - | (!!) + (1 2) = 1 -stderr: -./calc.at:1478: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -stderr: -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 +./calc.at:1443: $PREPARSER ./calc input stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1480: cat stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174001,9 +165940,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1480: $PREPARSER ./calc /dev/null +1.6: syntax error: invalid character: '#' +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1459: cat stderr stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174013,40 +165955,137 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error, unexpected end of input -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: cat stderr stderr: -syntax error, unexpected end of input -./calc.at:1478: cat stderr -input: - | 1//2 -./calc.at:1482: $PREPARSER ./calc input -input: - | (- *) + (1 2) = 1 -stderr: -./calc.at:1478: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 
(line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '/' () -syntax error -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1480: "$PERL" -pi -e 'use strict; +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) 
+input: +stderr: +1.6: syntax error: invalid character: '#' +stderr: +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174056,84 +166095,159 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: cat stderr -stderr: -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 + | error +./calc.at:1459: $PREPARSER ./calc input Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '/' () -Shifting token '/' () +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token '/' () -syntax error -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-error: 4444 != 1 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: cat stderr +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-error: 4444 != 1 -./calc.at:1478: cat stderr -./calc.at:1482: cat stderr -input: - | (* *) + (*) + (*) -input: -./calc.at:1478: $PREPARSER ./calc input - | error -./calc.at:1482: $PREPARSER ./calc input stderr: -./calc.at:1480: "$PERL" -pi -e 'use strict; +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1468: cat stderr +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174144,33 +166258,137 @@ }eg ' expout || exit 77 stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (* *) + (*) + (*) Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "invalid token" () -syntax error -Cleanup: discarding lookahead token "invalid token" () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1480: cat stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error -stderr: -Starting parse -Entering state 0 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token "invalid token" () -syntax error -Cleanup: discarding lookahead token "invalid token" () +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token 
number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (!!) + (1 2) = 1 -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1478: "$PERL" -pi -e 'use strict; +./calc.at:1476: cat stderr +./calc.at:1458: cat stderr +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174180,28 +166398,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1446: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1468: $PREPARSER ./calc input stderr: -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: $PREPARSER ./calc /dev/null +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) stderr: -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1478: cat stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1480: "$PERL" -pi -e 'use strict; +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174211,94 +166415,482 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: cat stderr -./calc.at:1480: cat stderr -stderr: -input: -input: input: - | 1 = 2 = 3 - | (- *) + (1 2) = 1 - | 1 + 2 * 3 + !- ++ -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1478: $PREPARSER ./calc input -stderr: -stderr: +534. 
calc.at:1446: ok stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +LAC: initial context discarded due to error recovery +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '=' () +Next token is token '*' (1.10: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.13: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G8 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.16: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R7 G8 S24 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | (1 + #) = 1111 +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: $PREPARSER ./calc input +stderr: syntax error -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-syntax error, unexpected number -error: 2222 != 1 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: cat stderr +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: cat stderr +stderr: stderr: +1.6: syntax error: invalid character: '#' stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +LAC: initial context discarded due to error recovery +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '=' () +Next token is token '*' (1.10: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err 
+LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +LAC: initial context discarded due to error recovery +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.13: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R7 G8 S20 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.16: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+LAC: initial context discarded due to error recovery +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +LAC: initial context established for '*' +LAC: checking lookahead '*': Err +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R7 G8 S24 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1459: cat stderr syntax error -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-syntax error, unexpected number -error: 2222 != 1 -./calc.at:1478: "$PERL" -pi -e 'use strict; +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1482: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +stderr: +1.6: syntax error: invalid character: '#' +input: + + | (1 + 1) / (1 - 1) +./calc.at:1482: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1451: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174308,7 +166900,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1480: "$PERL" -pi -e 'use strict; +stderr: +1.11-17: error: null divisor +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174318,8 +166913,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1480: cat stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174329,127 +166923,125 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1478: cat stderr -input: -input: - | (* *) + (*) + (*) -./calc.at:1482: cat stderr - | (#) + (#) = 2222 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1480: $PREPARSER ./calc input -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' input: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1482: $PREPARSER ./calc input -stderr: +./calc.at:1458: cat stderr +./calc.at:1455: cat stderr stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' stderr: + | 1 = 2 = 3 Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '+' () -syntax error -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' () -syntax error -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr -./calc.at:1480: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1478: $PREPARSER ./calc input +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.11-17: error: null divisor input: -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -./calc.at:1480: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' +./calc.at:1459: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1458: $PREPARSER ./calc input +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: cat stderr stderr: -./calc.at:1482: cat stderr input: -./calc.at:1482: $PREPARSER ./calc /dev/null - | 1 + 2 * 3 + !- ++ -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1478: "$PERL" -pi -e 'use strict; +stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) + | 1 + 2 * 3 + !+ ++ +1.2: syntax error: invalid character: '#' +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174460,59 +167052,119 @@ }eg ' expout || exit 77 stderr: +./calc.at:1476: cat stderr Starting parse Entering state 0 +Stack now 0 Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" () -stderr: -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Now at end of input. 
-syntax error -Cleanup: discarding lookahead token "end of input" () -./calc.at:1478: cat stderr +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1455: $PREPARSER ./calc input input: - | (# + 1) = 1111 -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1480: cat stderr -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1476: $PREPARSER ./calc input stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1482: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1480: $PREPARSER ./calc input -input: -./calc.at:1478: "$PERL" -pi -e 'use strict; +syntax error +error: 2222 != 1 +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174522,531 +167174,532 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1482: $PREPARSER ./calc input +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: cat stderr +1.2: syntax error: invalid character: '#' stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +error: 2222 != 1 Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - 
$1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token ')' () -syntax error -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '+' (1.11: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R9 G29 R7 G8 S20 +Reducing stack by rule 9 (line 105): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +LAC: initial context discarded due to shift Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 121): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1443: cat stderr +537. 
calc.at:1451: ./calc.at:1458: cat stderr + ok +stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Stack now 0 8 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '+' (1.11: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R9 G29 R7 G8 S20 +Reducing stack by rule 9 (line 105): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '*' () +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 121): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1455: $EGREP -c -v 'Return for a new token:|LAC:' stderr syntax error -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1458: $PREPARSER ./calc input +input: +./calc.at:1459: cat stderr +input: + | (# + 1) = 1111 +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 + !- ++ +./calc.at:1455: $PREPARSER ./calc input +stderr: +stderr: +1.6: syntax error: invalid character: '#' +stdout: + +./calc.at:1477: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +input: +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: + | + | +1 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' () +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Stack now 0 4 11 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = 
nterm exp (1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: cat stderr -stderr: +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 
= nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token ')' () -syntax error -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '+' (1.11: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': R9 G29 R7 G8 S20 +Reducing stack by rule 9 (line 105): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +LAC: initial context discarded due to shift Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token '*' () +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 122): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +./calc.at:1459: $PREPARSER ./calc input syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Stack now 0 8 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '+' (1.11: ) +LAC: initial context established for 
'+' +LAC: checking lookahead '+': R9 G29 R7 G8 S20 +Reducing stack by rule 9 (line 105): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +LAC: initial context discarded due to shift +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 122): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) input: - | (1 + # + 1) = 1111 -./calc.at:1478: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1482: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1458: cat stderr +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1468: cat stderr +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175056,9 +167709,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.6: syntax error: invalid character: '#' -./calc.at:1480: cat stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175068,274 +167720,133 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1480: $PREPARSER ./calc input -input: - | (!!) + (1 2) = 1 stderr: -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1478: cat stderr -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) stderr: +./calc.at:1458: $PREPARSER ./calc input Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() -Shifting token error () +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Shifting token error () +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Stack now 0 4 11 Reading a token -Next token is token ')' () +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -input: -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -stderr: -./calc.at:1478: $PREPARSER ./calc input -syntax error: invalid character: '#' -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Stack now 0 4 11 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() -Shifting token error () +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1455: cat stderr +stderr: stderr: +input: 1.11-17: error: null divisor -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: cat stderr + | 1 2 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1459: cat stderr + | (- *) + (1 2) = 1 +input: stderr: + | (#) + (#) = 2222 +./calc.at:1455: $PREPARSER ./calc input +stderr: +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1459: $PREPARSER ./calc /dev/null +1.3: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.11-17: error: null divisor -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: "$PERL" -pi -e 'use strict; +input: +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175345,140 +167856,172 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (# + 1) = 1111 -./calc.at:1480: $PREPARSER ./calc input stderr: -./calc.at:1482: cat stderr -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: cat stderr -input: +558. calc.at:1485: testing Calculator C++ %glr-parser parse.error=detailed %debug %name-prefix "calc" %verbose ... +syntax error +syntax error +error: 2222 != 1 stderr: -syntax error: invalid character: '#' - | (- *) + (1 2) = 1 -551. calc.at:1478: ok -./calc.at:1482: $PREPARSER ./calc input stderr: +stderr: +1.3: syntax error + | (!!) 
+ (1 2) = 1 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +./calc.at:1443: cat stderr +./calc.at:1485: mv calc.y.tmp calc.y + +syntax error +error: 2222 != 1 +./calc.at:1458: cat stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.1-2: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Shifting token error () +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.1-8: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 
25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '=' (1.11: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +LAC: initial context discarded due to shift Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) +Next token is token '\n' (1.17-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" () +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./calc.at:1480: "$PERL" -pi -e 'use strict; +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: +stderr: +syntax error +syntax error +error: 2222 != 1 +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175488,264 +168031,279 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.1-2: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Shifting token error () +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is 
token ')' () +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.1-8: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '=' (1.11: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R7 G8 S18 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +LAC: initial context discarded due to shift Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) +Next token is token '\n' (1.17-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" () +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1480: cat stderr -input: -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) | (1 + # + 1) = 1111 -./calc.at:1480: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: cat stderr +./calc.at:1443: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -input: - | (* *) + (*) + (*) -./calc.at:1482: $PREPARSER ./calc input +542. calc.at:1458: ok +syntax error +error: 2222 != 1 stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '*' () -syntax error -Shifting token error () +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Stack now 0 4 11 Reading a token -Next token is token ')' () +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp 
(1111) -Entering state 29 +Stack now 0 4 11 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Stack now 0 8 Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1477: cat stderr +./calc.at:1459: cat stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175755,127 +168313,128 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '*' () -syntax error -Shifting token error () +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Stack now 0 4 11 Reading a token -Next token is token ')' () +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Stack now 0 4 11 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Stack now 0 8 Reading a token -Next 
token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1480: cat stderr +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) input: - | (1 + 1) / (1 - 1) -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1482: "$PERL" -pi -e 'use strict; +input: + | 1//2 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175885,16 +168444,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1468: cat stderr +559. calc.at:1485: testing Calculator glr2.cc parse.error=detailed %debug %name-prefix "calc" %verbose ... 
+./calc.at:1459: $PREPARSER ./calc input stderr: -error: null divisor -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: mv calc.y.tmp calc.y + +./calc.at:1476: cat stderr stderr: -error: null divisor -./calc.at:1482: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1480: "$PERL" -pi -e 'use strict; +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1455: cat stderr +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175904,283 +168471,137 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1480: cat stderr -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -559. calc.at:1485: testing Calculator glr2.cc parse.error=detailed %debug %name-prefix "calc" %verbose ... 
-./calc.at:1485: mv calc.y.tmp calc.y - +./calc.at:1443: cat stderr +input: + | (* *) + (*) + (*) +input: + | (1 + #) = 1111 +./calc.at:1468: $PREPARSER ./calc input +1.3: syntax error +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -554. 
calc.at:1480: ok -./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +syntax error +syntax error +syntax error +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 + 2 * 3 + !- ++ -./calc.at:1482: $PREPARSER ./calc input stderr: +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 +Stack now 0 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -stderr: -Starting parse -Entering state 0 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) +Next token is token '\n' (1.15-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' 
() -Entering state 5 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1482: "$PERL" -pi -e 'use strict; +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -176190,208 +168611,289 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: cat stderr input: - | (#) + (#) = 2222 -./calc.at:1482: $PREPARSER ./calc input stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-6: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) 
-Entering state 29 +Stack now 0 4 11 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) +Next token is token '\n' (1.15-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" () +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: + | (- *) + (1 2) = 1 +./calc.at:1459: cat stderr +./calc.at:1476: $PREPARSER ./calc input +syntax error +syntax error +syntax error + | (1 + 1) / (1 - 1) +./calc.at:1443: $PREPARSER ./calc input +stderr: +syntax error +syntax error +error: 2222 != 1 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: cat stderr stderr: +input: + | (!!) 
+ (1 2) = 1 +./calc.at:1459: $PREPARSER ./calc input Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 +Stack now 0 8 22 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) -Entering state 
27 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: "$PERL" -pi -e 'use strict; +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +syntax error +syntax error +error: 2222 != 1 +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -176401,173 +168903,165 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: cat stderr -560. calc.at:1486: testing Calculator C++ %glr-parser parse.error=verbose %debug %name-prefix "calc" %verbose ... 
-./calc.at:1486: mv calc.y.tmp calc.y - -./calc.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1455: cat stderr +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 input: - | (1 + #) = 1111 -./calc.at:1482: $PREPARSER ./calc input stderr: + | error +./calc.at:1477: $PREPARSER ./calc input Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 +Stack now 0 4 12 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Stack now 0 8 Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 +Stack now 0 8 22 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 +Stack now 0 8 22 4 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = 
nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1485: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1482: "$PERL" -pi -e 'use strict; +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1468: cat stderr + | (# + 1) = 1111 +./calc.at:1455: $PREPARSER ./calc input +stderr: +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -176577,152 +169071,269 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: cat stderr input: - | (# + 1) = 1111 -./calc.at:1482: $PREPARSER ./calc input +stderr: + | 1 + 2 * 3 + !+ ++ stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token '+' (1.4: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' () +Next token is token number (1.6: 1) +LAC: initial context established for number +LAC: checking lookahead number: Err +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.1-6: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) 
+ $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Next token is token '\n' (1.15-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" () +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.1: syntax error +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1443: cat stderr +./calc.at:1459: cat stderr stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token '+' (1.4: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' () +Next token is token number (1.6: 1) +LAC: initial context established for number +LAC: checking lookahead number: Err +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.1-6: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Next token is token '\n' (1.15-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' 
(1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" () +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: "$PERL" -pi -e 'use strict; +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: cat stderr +532. calc.at:1443: ok +560. calc.at:1486: testing Calculator C++ %glr-parser parse.error=verbose %debug %name-prefix "calc" %verbose ... +./calc.at:1486: mv calc.y.tmp calc.y + +./calc.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +input: + | (- *) + (1 2) = 1 +./calc.at:1459: $PREPARSER ./calc input +input: +input: +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -176732,180 +169343,318 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: cat stderr +./calc.at:1477: cat stderr + | 1 + 2 * 3 + !- ++ + | (* *) + (*) + (*) +./calc.at:1476: $PREPARSER ./calc input +stderr: +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1455: cat stderr +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +stderr: + +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +syntax error +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +input: +./calc.at:1485: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 | (1 + # + 1) = 1111 -./calc.at:1482: $PREPARSER ./calc input +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1477: $PREPARSER ./calc input +stderr: +syntax error +syntax error +syntax error +stderr: +stderr: +1.7: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +stderr: stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = 
token "number" (1) --> $$ = nterm exp (1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 +Stack now 0 4 12 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token '+' (1.8: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' () +Next token is token number (1.10: 1) +LAC: initial context established for number +LAC: checking lookahead number: Err +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-10: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Next token is token '\n' (1.19-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' 
(1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" () +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +1.7: syntax error stderr: +./calc.at:1478: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 +Stack now 0 4 12 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token '+' (1.8: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) 
+Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' () +Next token is token number (1.10: 1) +LAC: initial context established for number +LAC: checking lookahead number: Err +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +LAC: initial context discarded due to error recovery +Shifting token error (1.2-10: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 118): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Next token is token '\n' (1.19-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" () +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: "$PERL" -pi -e 'use strict; +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1459: cat stderr +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -176915,247 +169664,1204 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: cat stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1478: $PREPARSER ./calc input + | (* *) + (*) + (*) +./calc.at:1459: $PREPARSER ./calc input +./calc.at:1455: cat stderr +stderr: +./calc.at:1476: cat stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +stderr: +./calc.at:1477: cat stderr +stderr: input: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) | (1 + 1) / (1 - 1) -./calc.at:1486: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1482: $PREPARSER ./calc input +./calc.at:1468: cat stderr +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + | 1 2 +stderr: + | + | +1 +./calc.at:1477: $PREPARSER ./calc input +input: +./calc.at:1478: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1468: $PREPARSER ./calc input +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +stderr: +2.1: syntax error +stderr: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 +Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 +Stack now 0 4 
Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 29 +Stack now 0 4 12 20 29 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) +Next token is token ')' (1.7: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R7 G12 S26 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +LAC: initial context discarded due to shift Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 +Stack now 0 8 Reading a token -Next token is token '/' () -Shifting token '/' () +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 +Stack now 0 8 22 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 +Stack now 0 8 22 4 12 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) Entering state 19 +Stack now 0 8 22 4 12 19 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 +Stack now 0 8 22 4 12 19 28 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) +Next token is token ')' (1.17: ) +LAC: initial context established for ')' +LAC: checking 
lookahead ')': R8 G12 S26 +Reducing stack by rule 8 (line 104): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +LAC: initial context discarded due to shift Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) Entering state 31 +Stack now 0 8 22 31 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Next token is token '\n' (1.18-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R10 G8 S24 +Reducing stack by rule 10 (line 106): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +LAC: initial context discarded due to shift Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 +Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" () +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +2.1: syntax error +stderr: +1.3: syntax error +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R7 G12 S26 +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +LAC: initial context discarded due to shift +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R8 G12 S26 +Reducing stack by rule 8 (line 104): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +LAC: initial context discarded due to shift +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 
+Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R10 G8 S24 +Reducing stack by rule 10 (line 106): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of file (2.1: ) +Shifting token end of file (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +syntax error: invalid character: '#' +syntax error: invalid character: '#' + | 1 + 2 * 3 + !+ ++ +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1459: cat stderr +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +561. calc.at:1486: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" %verbose ... + | 1 + 2 * 3 + !+ ++ +stderr: +./calc.at:1459: $PREPARSER ./calc input +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1486: mv calc.y.tmp calc.y + +input: +stderr: +stderr: +./calc.at:1477: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1455: cat stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1478: cat stderr +./calc.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1477: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +540. 
calc.at:1455: ok +stderr: + | 1//2 +stderr: +stderr: +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.1: syntax error +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.3: syntax error +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1459: $PREPARSER ./calc input +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1468: cat stderr +stderr: +./calc.at:1477: cat stderr +./calc.at:1486: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +input: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + + | (1 + #) = 1111 +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +stderr: +./calc.at:1477: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1478: cat stderr +syntax error: invalid character: '#' +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +input: + | error +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1459: cat stderr +./calc.at:1476: cat stderr +stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.1: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1468: cat stderr +1.1: syntax error +input: + | (#) + (#) = 2222 +./calc.at:1476: $PREPARSER ./calc input +input: +./calc.at:1477: cat stderr +input: +stderr: + | (#) + (#) = 2222 +./calc.at:1459: $PREPARSER ./calc input +stderr: + | (# + 1) = 1111 +./calc.at:1468: $PREPARSER ./calc input +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +input: +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +./calc.at:1477: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error: invalid character: '#' +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +./calc.at:1459: cat stderr +input: +stderr: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1486: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | 1 = 2 = 3 +./calc.at:1478: $PREPARSER ./calc input +1.11: syntax error +1.1-16: error: 2222 != 1 +stderr: +1.7: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + #) = 1111 +./calc.at:1459: $PREPARSER ./calc input +562. calc.at:1487: testing Calculator glr2.cc parse.error=custom %debug %name-prefix "calc" %verbose ... 
+stderr: +1.7: syntax error +./calc.at:1476: cat stderr +stderr: +./calc.at:1487: mv calc.y.tmp calc.y + +1.6: syntax error: invalid character: '#' +./calc.at:1487: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: +./calc.at:1477: cat stderr + | (1 + #) = 1111 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1468: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +input: + | (1 + # + 1) = 1111 +./calc.at:1468: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr + | (- *) + (1 2) = 1 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1459: cat stderr +stderr: +stderr: +syntax error: invalid character: '#' +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +input: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1476: cat stderr +stderr: +stderr: +2.1: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +stderr: +input: +2.1: syntax error + | (# + 1) = 1111 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (# + 1) = 1111 +./calc.at:1459: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1468: cat stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +input: + | (1 + 1) / (1 - 1) +stderr: +./calc.at:1468: $PREPARSER ./calc input +stderr: +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +error: null divisor +./calc.at:1477: cat stderr +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +stderr: +1.2: syntax error: invalid character: '#' +stderr: +error: null divisor +./calc.at:1478: $PREPARSER ./calc /dev/null +input: +stderr: +1.1: syntax error + | (* *) + (*) + (*) +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1459: cat stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1487: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error +stderr: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error +1.10: syntax error +1.16: syntax error +input: + | (1 + # + 1) = 1111 +./calc.at:1468: cat stderr +./calc.at:1459: $PREPARSER ./calc input +./calc.at:1476: cat stderr +stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.6: syntax error: invalid character: '#' +544. calc.at:1468: ok +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | (1 + # + 1) = 1111 +./calc.at:1476: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +./calc.at:1477: cat stderr +stderr: +./calc.at:1459: cat stderr +syntax error: invalid character: '#' +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +input: +syntax error: invalid character: '#' + | 1 + 2 * 3 + !+ ++ +./calc.at:1477: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1478: cat stderr +./calc.at:1459: $PREPARSER ./calc input +stderr: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11-17: error: null divisor +stderr: + +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +1.11-17: error: null divisor +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1459: cat stderr +stderr: +./calc.at:1476: cat stderr +stderr: +543. calc.at:1459: input: + | (1 + 1) / (1 - 1) +./calc.at:1476: $PREPARSER ./calc input +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 + ok +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +error: null divisor +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: null divisor +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 + +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (#) + (#) = 2222 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1476: cat stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +stderr: +546. calc.at:1476: ok +563. 
calc.at:1489: testing Calculator C++ %glr-parser parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose ... +./calc.at:1489: mv calc.y.tmp calc.y + +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +input: + | (!!) + (1 2) = 1 +./calc.at:1478: $PREPARSER ./calc input +stderr: +./calc.at:1477: cat stderr +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +stdout: +./types.at:139: $PREPARSER ./test +stderr: +564. calc.at:1489: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose ... +./calc.at:1489: mv calc.y.tmp calc.y + +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y + | (1 + #) = 1111 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1478: cat stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +451. types.at:139: ok +1.6: syntax error: invalid character: '#' +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.6: syntax error: invalid character: '#' +input: + | (- *) + (1 2) = 1 +./calc.at:1478: $PREPARSER ./calc input + +stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +565. calc.at:1491: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1491: mv calc.y.tmp calc.y + +./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1477: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1489: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | (# + 1) = 1111 +./calc.at:1477: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1478: cat stderr +./calc.at:1477: cat stderr +input: +input: + | (* *) + (*) + (*) +./calc.at:1478: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +stderr: +./calc.at:1477: $PREPARSER ./calc input +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +stderr: +566. calc.at:1491: testing Calculator glr2.cc %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +1.6: syntax error: invalid character: '#' +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: mv calc.y.tmp calc.y + +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1478: cat stderr +1.6: syntax error: invalid character: '#' +./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +stderr: +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1477: cat stderr +./calc.at:1491: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +stderr: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1477: $PREPARSER ./calc input +stderr: +stderr: +1.11-17: error: null divisor +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11-17: error: null divisor +./calc.at:1478: cat stderr +input: + | (#) + (#) = 2222 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: stdout: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1479: "$PERL" -ne ' +./calc.at:1469: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -177164,8 +170870,10 @@ || /\s$/ # No tabs. || /\t/ - )' calc.cc calc.hh + )' calc.cc +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -177180,8 +170888,9 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1469: $PREPARSER ./calc input +./calc.at:1477: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -177192,20 +170901,32 @@ }eg ' expout || exit 77 stderr: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +548. calc.at:1477: ./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok + stderr: input: -./calc.at:1482: cat stderr | 1 2 -./calc.at:1479: $PREPARSER ./calc input -556. calc.at:1482: stderr: - ok -syntax error, unexpected number -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: $PREPARSER ./calc input stderr: -syntax error, unexpected number +syntax error +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +567. calc.at:1492: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +input: +./calc.at:1492: mv calc.y.tmp calc.y -./calc.at:1479: "$PERL" -pi -e 'use strict; + | (1 + #) = 1111 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +stderr: +syntax error +1.6: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -177215,16 +170936,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1479: cat stderr -input: - | 1//2 -./calc.at:1479: $PREPARSER ./calc input -stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1479: "$PERL" -pi -e 'use strict; +./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -177234,20 +170946,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1479: cat stderr +./calc.at:1469: cat stderr +./calc.at:1478: cat stderr input: - | error -./calc.at:1479: $PREPARSER ./calc input + | 1//2 +./calc.at:1469: $PREPARSER ./calc input stderr: -syntax error, unexpected invalid token -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error, unexpected invalid token -561. 
calc.at:1486: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" %verbose ... -./calc.at:1486: mv calc.y.tmp calc.y - -./calc.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1479: "$PERL" -pi -e 'use strict; +syntax error +./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -177257,16 +170966,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1479: cat stderr input: - | 1 = 2 = 3 -./calc.at:1479: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1469: cat stderr stderr: -syntax error, unexpected '=' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +1.2: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1469: $PREPARSER ./calc input stderr: -syntax error, unexpected '=' -./calc.at:1479: "$PERL" -pi -e 'use strict; +syntax error +stderr: +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +1.2: syntax error: invalid character: '#' +./calc.at:1492: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -177276,17 +170994,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1479: cat stderr -input: - | - | +1 -./calc.at:1479: $PREPARSER ./calc input -stderr: -syntax error, unexpected '+' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '+' -./calc.at:1479: "$PERL" -pi -e 'use strict; +./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -177296,14 +171004,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1479: cat stderr -./calc.at:1479: $PREPARSER ./calc /dev/null +./calc.at:1478: cat stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1478: $PREPARSER ./calc input stderr: -syntax error, unexpected end of input -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: cat stderr stderr: -syntax error, unexpected end of input -./calc.at:1479: "$PERL" -pi -e 'use strict; +1.6: syntax error: invalid character: '#' +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -177313,25 +171024,55 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1479: cat stderr input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1479: $PREPARSER ./calc input stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-error: 4444 != 1 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +stdout: +./calc.at:1476: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + + | 1 = 2 = 3 +./calc.at:1469: $PREPARSER ./calc input +./calc.at:1478: cat stderr stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1479: "$PERL" -pi -e 'use strict; +syntax error +input: + | (1 + 1) / (1 - 1) +./calc.at:1478: $PREPARSER ./calc input +input: +stderr: +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +1.11-17: error: null divisor +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1476: $PREPARSER ./calc input +stderr: +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11-17: error: null divisor +stderr: +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -177341,18 +171082,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1479: cat stderr input: - | (!!) + (1 2) = 1 -./calc.at:1479: $PREPARSER ./calc input -stderr: -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1479: "$PERL" -pi -e 'use strict; +./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -177362,20 +171093,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1479: cat stderr + | 1 2 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1469: cat stderr +stderr: +./calc.at:1478: cat stderr +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +550. calc.at:1478: ok +syntax error input: - | (- *) + (1 2) = 1 -./calc.at:1479: $PREPARSER ./calc input + | + | +1 +./calc.at:1469: $PREPARSER ./calc input stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1479: "$PERL" -pi -e 'use strict; +syntax error +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -177385,20 +171122,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1479: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1479: $PREPARSER ./calc input -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1479: "$PERL" -pi -e 'use strict; +./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -177408,20 +171132,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1479: cat stderr + +./calc.at:1476: cat stderr +./calc.at:1469: cat stderr input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1479: $PREPARSER ./calc input +./calc.at:1469: $PREPARSER ./calc /dev/null + | 1//2 +./calc.at:1476: $PREPARSER ./calc input stderr: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1479: $PREPARSER ./calc input +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error stderr: -./calc.at:1479: "$PERL" -pi -e 'use strict; +syntax error +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -177431,18 +171159,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1479: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1479: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1479: "$PERL" -pi -e 'use strict; +./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -177452,16 +171169,33 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1479: cat stderr +./calc.at:1476: cat stderr +./calc.at:1469: cat stderr input: - | (1 + #) = 1111 -./calc.at:1479: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +input: +./calc.at:1469: $PREPARSER ./calc input + | error +./calc.at:1476: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' -./calc.at:1479: "$PERL" -pi -e 'use strict; +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +stderr: +syntax error +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -177471,16 +171205,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1479: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1479: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1479: "$PERL" -pi -e 'use strict; +./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -177490,16 +171215,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1479: cat stderr +568. calc.at:1492: testing Calculator glr2.cc %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1492: mv calc.y.tmp calc.y + +./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1476: cat stderr input: - | (1 + # + 1) = 1111 -./calc.at:1479: $PREPARSER ./calc input +./calc.at:1469: cat stderr +input: + | (!!) + (1 2) = 1 +./calc.at:1469: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +error: 2222 != 1 +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1476: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -./calc.at:1479: "$PERL" -pi -e 'use strict; +syntax error +error: 2222 != 1 +stderr: +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -177509,16 +171249,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1479: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1479: $PREPARSER ./calc input -stderr: -error: null divisor -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: null divisor -./calc.at:1479: "$PERL" -pi -e 'use strict; +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -177528,15 +171259,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1479: cat stderr -552. calc.at:1479: ok - -562. calc.at:1487: testing Calculator glr2.cc parse.error=custom %debug %name-prefix "calc" %verbose ... -./calc.at:1487: mv calc.y.tmp calc.y - -./calc.at:1487: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1469: cat stderr +input: +./calc.at:1476: cat stderr + | (- *) + (1 2) = 1 +./calc.at:1469: $PREPARSER ./calc input +stderr: +syntax error +syntax error +error: 2222 != 1 stderr: +input: +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 stdout: +./calc.at:1476: $PREPARSER ./calc input +stderr: ./calc.at:1480: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -177548,7 +171287,41 @@ || /\t/ )' calc.cc +stderr: +stdout: +./calc.at:1482: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + +syntax error +syntax error +error: 2222 != 1 +./calc.at:1492: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +stderr: +syntax error input: +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1482: $PREPARSER ./calc input | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -177563,9 +171336,6 @@ | 2^2^3 = 256 | (2^2)^3 = 64 ./calc.at:1480: $PREPARSER ./calc input -./calc.at:1487: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -stderr: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./calc.at:1485: "$PERL" -ne ' @@ -177579,10 +171349,11 @@ || /\t/ )' calc.cc +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: - | 1 2 -./calc.at:1480: $PREPARSER ./calc input +stderr: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -177599,19 +171370,14 @@ | (2^2)^3 = 64 ./calc.at:1485: $PREPARSER ./calc input stderr: -syntax error, unexpected number -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected number -stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -177619,11 +171385,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -177631,23 +171397,23 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -177657,16 +171423,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token "number" (7) +Shifting token "number" (7) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (7) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) -> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) @@ -177675,21 +171441,21 @@ Next token is 
token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -177697,11 +171463,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -177713,29 +171479,29 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering state 30 Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) @@ -177749,22 +171515,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token "number" (5) +Shifting token "number" (5) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (5) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) -> $$ = nterm exp (5) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) @@ -177773,12 +171539,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -177787,11 +171553,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () 
-> $$ = nterm input () @@ -177801,11 +171567,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -177813,23 +171579,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -177842,22 +171608,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -177866,12 +171632,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -177885,16 +171651,16 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -177902,7 +171668,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -177913,16 +171679,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering 
state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) @@ -177932,16 +171698,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) @@ -177950,12 +171716,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -177964,11 +171730,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -177986,28 +171752,28 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -178020,22 +171786,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -178044,12 +171810,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 
(line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -178058,21 +171824,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -178080,16 +171846,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 28 Reading a token Next token is token '-' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) @@ -178099,16 +171865,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) @@ -178122,22 +171888,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token "number" (4) +Shifting token "number" (4) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (4) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) -> $$ = nterm exp (4) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) @@ -178146,22 +171912,22 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -178173,11 +171939,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token 
number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -178185,16 +171951,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (2) $2 = token '-' () $3 = nterm exp (3) @@ -178203,7 +171969,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -178211,7 +171977,7 @@ Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (-1) @@ -178221,16 +171987,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) @@ -178239,12 +172005,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -178253,21 +172019,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 8 Reading a token @@ -178275,11 +172041,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -178287,23 +172053,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (3) -Shifting token 
number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) Entering state 32 Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) @@ -178313,16 +172079,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token "number" (256) +Shifting token "number" (256) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (256) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) -> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (256) $2 = token '=' () $3 = nterm exp (256) @@ -178331,12 +172097,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -178346,11 +172112,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -178358,16 +172124,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) @@ -178376,7 +172142,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () @@ -178387,16 +172153,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (4) $2 = token '^' () $3 = nterm exp (3) @@ -178406,16 +172172,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (64) -Shifting token number (64) 
+Next token is token "number" (64) +Shifting token "number" (64) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (64) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) -> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) @@ -178424,25 +172190,25 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1480: "$PERL" -pi -e 'use strict; +./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -178452,14 +172218,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -178467,11 +172234,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -178479,23 +172246,23 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -178505,16 +172272,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token "number" (7) +Shifting token "number" (7) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (7) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) -> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by 
rule 6 (line 80): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) @@ -178523,21 +172290,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -178545,11 +172312,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -178561,29 +172328,29 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering state 30 Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) @@ -178597,22 +172364,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token "number" (5) +Shifting token "number" (5) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (5) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) -> $$ = nterm exp (5) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) @@ -178621,12 +172388,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -178635,11 +172402,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 
-Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -178649,11 +172416,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -178661,23 +172428,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -178690,22 +172457,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -178714,12 +172481,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -178733,16 +172500,16 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -178750,7 +172517,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -178761,16 +172528,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = 
token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) @@ -178780,16 +172547,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) @@ -178798,12 +172565,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -178812,11 +172579,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -178834,28 +172601,28 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -178868,22 +172635,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -178892,12 +172659,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token 
'\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -178906,21 +172673,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -178928,16 +172695,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 28 Reading a token Next token is token '-' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) @@ -178947,16 +172714,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) @@ -178970,22 +172737,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token "number" (4) +Shifting token "number" (4) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (4) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) -> $$ = nterm exp (4) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) @@ -178994,22 +172761,22 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading 
a token @@ -179021,11 +172788,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -179033,16 +172800,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (2) $2 = token '-' () $3 = nterm exp (3) @@ -179051,7 +172818,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -179059,7 +172826,7 @@ Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (-1) @@ -179069,16 +172836,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) @@ -179087,12 +172854,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -179101,21 +172868,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 8 Reading a token @@ -179123,11 +172890,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -179135,23 
+172902,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) Entering state 32 Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) @@ -179161,16 +172928,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token "number" (256) +Shifting token "number" (256) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (256) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) -> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (256) $2 = token '=' () $3 = nterm exp (256) @@ -179179,12 +172946,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -179194,11 +172961,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -179206,16 +172973,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) @@ -179224,7 +172991,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () @@ -179235,16 +173002,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (4) $2 = token '^' () $3 = nterm exp (3) @@ -179254,16 +173021,16 @@ 
Shifting token '=' () Entering state 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token "number" (64) +Shifting token "number" (64) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (64) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) -> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) @@ -179272,217 +173039,23 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -input: -./calc.at:1480: cat stderr - | 1 2 -./calc.at:1485: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1//2 -./calc.at:1480: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1//2 -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1480: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: - | error -./calc.at:1480: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -stderr: -syntax error, unexpected invalid token -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected invalid token -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | error -./calc.at:1485: $PREPARSER ./calc input -stderr: -./calc.at:1480: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -input: - | 1 = 2 = 3 -./calc.at:1480: $PREPARSER ./calc input -stderr: -syntax error, unexpected '=' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error, unexpected '=' -./calc.at:1485: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1480: cat stderr Starting parse Entering state 0 Reading a token @@ -179494,9 +173067,9 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token number (2) Shifting token number (2) @@ -179504,109 +173077,59 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 27 +Entering state 29 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token Next token is token '=' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (7) +Shifting token number (7) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (7) +-> $$ = nterm exp (7) Entering state 27 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -input: - | - | +1 -./calc.at:1480: $PREPARSER ./calc input -stderr: -syntax error, unexpected '+' -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '+' -./calc.at:1485: cat stderr -input: - | - | +1 -./calc.at:1485: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) +Entering state 8 Next token is token '\n' () Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (7) + $2 = token '\n' () -> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 82): @@ -179614,495 +173137,121 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1480: cat stderr -./calc.at:1480: $PREPARSER ./calc /dev/null -stderr: -syntax error, unexpected end of input -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error, unexpected end of input -./calc.at:1485: cat stderr -./calc.at:1485: $PREPARSER ./calc /dev/null -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file () -./calc.at:1480: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1480: $PREPARSER ./calc input -stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) + $1 = token number (2) -> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token Next token is token '*' () Shifting token '*' () Entering state 21 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 10 Reading a token -Next token is token '*' () +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (1) + $1 = nterm exp (2) $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) Entering state 29 -Reading a token Next token is token '=' () Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (3333) + $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (5) +Shifting token number (5) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 + $1 = token number (5) +-> $$ = nterm exp (5) +Entering state 10 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (4444) + $1 = nterm exp (-5) $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (4444) + $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1480: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token Next token is token number (1) Shifting token number (1) @@ -180110,11 +173259,11 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 10 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token Next token is token number (2) Shifting token number (2) @@ -180122,158 +173271,71 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 30 +Entering state 32 Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (1) - $2 = token '*' () + $2 = token '^' () $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token +-> $$ = nterm exp (1) +Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 27 +Entering state 10 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (4444) + $1 = nterm exp (-1) $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (4444) + $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -input: - | (!!) + (1 2) = 1 -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1485: cat stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (!!) + (1 2) = 1 -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1480: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 120): - $1 = token '!' 
() - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token Next token is token number (1) Shifting token number (1) @@ -180281,34 +173343,42 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Entering state 10 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +-> $$ = nterm exp (-1) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 8 Next token is token '=' () Shifting token '=' () @@ -180324,77 +173394,49 @@ Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) + $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) +-> $$ = nterm exp (1) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) + $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1480: $PREPARSER ./calc input -stderr: -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' 
() -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 120): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token Next token is token number (1) Shifting token number (1) @@ -180402,157 +173444,81 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 27 +Entering state 10 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) + $1 = nterm exp (-1) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) + $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm 
line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr -./calc.at:1485: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1480: $PREPARSER ./calc input -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (- *) + (1 2) = 1 -stderr: -./calc.at:1485: $PREPARSER ./calc input -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 119): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token Next token is token number (1) Shifting token number (1) @@ -180560,467 +173526,364 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 28 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token '-' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (4) +Shifting token number (4) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 + $1 = token number (4) +-> $$ = nterm exp (4) +Entering state 10 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) + $1 = nterm exp (-4) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = 
nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) + $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1480: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token Next token is token '-' () Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 119): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Entering state 19 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (1111) -Entering 
state 29 +-> $$ = nterm exp (-1) +Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) + $1 = nterm exp (2) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) + $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1480: $PREPARSER ./calc input -stderr: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1480: $PREPARSER ./calc input -stderr: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1485: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: $PREPARSER ./calc input -stderr: -./calc.at:1480: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (256) +Shifting token number (256) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (256) +-> $$ = nterm exp (256) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (3333) + $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (#) + (#) = 2222 -./calc.at:1480: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 12 Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (4) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (4) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (64) +Shifting token number (64) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (64) +-> $$ = nterm exp (64) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (3333) + $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token @@ -181029,46 +173892,13 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (1 + #) = 1111 -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1485: cat stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: input: -syntax error: invalid character: '#' - | 1 + 2 * 3 + !+ ++ -./calc.at:1485: $PREPARSER ./calc input + | 1 2 + | 1 2 +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1482: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -181105,50 +173935,51 @@ -> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '+' () +Next token is token '=' () Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 -Next token is token '+' () +Next token is token '=' () Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) -> $$ = nterm exp (7) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token number (7) +Shifting token number (7) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (7) +-> $$ = nterm exp (7) +Entering state 27 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token Next token is token number (1) Shifting token number (1) @@ -181174,126 +174005,97 @@ Shifting token '*' () Entering state 21 Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token Next token is token number (3) Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 92): $1 = token number (3) -> $$ = nterm exp (3) -Entering state 30 +Entering state 10 Reading a token -Next token is token '+' () +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) Entering state 29 -Next token is token '+' () +Next token is token '=' () Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): - $1 = token '!' 
() - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1480: cat stderr -./calc.at:1485: $PREPARSER ./calc input -input: - | (# + 1) = 1111 -./calc.at:1480: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (5) +Shifting token number (5) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 + $1 = token number (5) +-> $$ = nterm exp (5) +Entering state 10 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token Next token is token '-' () Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -stderr: -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error: invalid character: '#' -Starting parse -Entering state 0 +Entering state 2 Reading a token Next token is token number (1) Shifting token number (1) @@ -181301,11 +174103,11 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 10 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token Next token is token number (2) Shifting token number (2) @@ -181313,319 +174115,172 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 +Entering state 32 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr -./calc.at:1485: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1480: $PREPARSER ./calc input -stderr: -input: -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (#) + (#) = 2222 -./calc.at:1485: $PREPARSER ./calc input -syntax error: invalid character: '#' -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (2222) --> $$ = nterm exp (2222) -Entering state 27 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) + $1 = nterm exp (-1) $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) + $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (-1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (2222) --> $$ = nterm exp (2222) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) + $1 = nterm exp (1) $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $3 = nterm exp (1) +-> $$ = nterm exp (1) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing 
stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) + $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1480: cat stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (1 + 1) / (1 - 1) -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1485: cat stderr -stderr: -error: null divisor -input: - | (1 + #) = 1111 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token Next token is token number (1) Shifting token number (1) @@ -181633,79 +174288,81 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Entering state 10 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 -Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 + $1 = token number (1) +-> $$ = nterm 
exp (1) +Entering state 10 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) + $1 = nterm exp (-1) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) + $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -error: null divisor -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token Next token is token number (1) Shifting token number (1) @@ -181713,413 +174370,364 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 28 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Next token is token '-' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr -./calc.at:1485: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1485: $PREPARSER ./calc input -555. calc.at:1480: ok -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '=' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) Entering state 8 -Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (4) +Shifting token number (4) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 + $1 = token number (4) +-> $$ = nterm exp (4) +Entering state 10 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) + $1 = nterm exp (-4) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) + $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - -Starting parse -Entering state 0 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 12 Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +-> $$ = nterm exp (-1) +Entering state 28 Reading a token Next token is token '=' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) + $1 = nterm exp (2) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) + $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1485: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 -Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (256) +Shifting token number (256) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (256) +-> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) + $1 = nterm exp (256) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (256) +-> $$ = nterm exp (256) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) + $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 
+Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (4) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (4) Entering state 8 Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) +Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (64) +Shifting token number (64) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (64) +-> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) + $1 = nterm exp (64) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) + $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () 
-> $$ = nterm input () Entering state 6 Reading a token @@ -182128,7 +174736,49 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1485: "$PERL" -pi -e 'use strict; +./calc.at:1469: cat stderr +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) +syntax error, unexpected number +input: +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 2 +./calc.at:1485: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) +syntax error, unexpected number +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -182138,74 +174788,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1485: cat stderr input: - | (1 + 1) / (1 - 1) -./calc.at:1485: $PREPARSER ./calc input + | (* *) + (*) + (*) stderr: +./calc.at:1469: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +stderr: +syntax error +syntax error +syntax error +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: cat stderr +stderr: +./calc.at:1476: $PREPARSER ./calc /dev/null +Starting parse +Entering state 
0 Reading a token Next token is token number (1) Shifting token number (1) @@ -182213,108 +174824,176 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 106): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -563. calc.at:1489: testing Calculator C++ %glr-parser parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose ... -./calc.at:1489: mv calc.y.tmp calc.y +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./calc.at:1477: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc -./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error +stderr: +syntax error +syntax error +syntax error +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: cat stderr +stderr: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1482: cat stderr +./calc.at:1485: cat stderr +input: +stderr: + | 1//2 +./calc.at:1480: $PREPARSER ./calc input +input: +input: +input: + | 1//2 + | 1//2 +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1482: $PREPARSER ./calc input +stderr: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' + | 1 2 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +1.3: syntax error +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) Entering state 8 Reading a token Next token is token '/' () Shifting token '/' () Entering state 22 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '/' () +syntax error +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+./calc.at:1469: cat stderr +stderr: +stderr: +1.3: syntax error +Starting parse +Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) @@ -182322,63 +175001,78 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 106): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +input: +./calc.at:1476: cat stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1469: $PREPARSER ./calc input +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: ./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -182389,18 +175083,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1485: cat stderr -558. calc.at:1485: ok - -./calc.at:1489: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -564. calc.at:1489: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose ... -./calc.at:1489: mv calc.y.tmp calc.y - -./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1489: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS stderr: +./calc.at:1477: cat stderr +./calc.at:1480: cat stderr stdout: -./calc.at:1482: "$PERL" -ne ' +./calc.at:1486: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -182411,7 +175098,29 @@ || /\t/ )' calc.cc +stderr: +input: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 + | 1 + 2 * 3 + !- ++ +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: $PREPARSER ./calc input +input: +./calc.at:1485: cat stderr + | error +./calc.at:1480: $PREPARSER ./calc input +input: +stderr: +./calc.at:1482: cat stderr input: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -182425,11 +175134,86 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1482: $PREPARSER ./calc input + | 1//2 +stderr: +stderr: +./calc.at:1486: $PREPARSER ./calc input +syntax error, unexpected invalid token +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | error +./calc.at:1485: $PREPARSER ./calc input +stderr: +stderr: +1.3: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected invalid token +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +stderr: +1.3: syntax error +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: cat stderr +stderr: +./calc.at:1480: cat stderr +Starting parse +Entering state 0 +Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 @@ -183263,7 +176047,30 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1469: cat stderr +./calc.at:1477: cat stderr + | error +input: + | (!!) + (1 2) = 1 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1482: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1480: $PREPARSER ./calc input +stderr: +stderr: stderr: Starting parse Entering state 0 @@ -184102,9 +176909,48 @@ Cleanup: popping token "end of input" () Cleanup: popping nterm input () input: +syntax error +error: 2222 != 1 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error +Cleanup: discarding lookahead token "invalid token" () +input: +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: cat stderr +input: +syntax error, unexpected '=' + | (#) + (#) = 2222 +./calc.at:1469: $PREPARSER ./calc input + | error +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: $PREPARSER ./calc input | 1 2 -./calc.at:1482: $PREPARSER ./calc input +./calc.at:1486: $PREPARSER ./calc input +stderr: +stderr: +stderr: +syntax error +error: 2222 != 1 +stderr: +stderr: +syntax error, unexpected '=' +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error +Cleanup: discarding lookahead token "invalid token" () +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error stderr: +input: Starting parse Entering state 0 Reading a token @@ -184117,11 +176963,17 @@ Entering state 8 Reading a token Next token is token "number" (2) -syntax error +syntax error, unexpected number Error: popping nterm exp (1) Cleanup: discarding lookahead token "number" (2) -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: 
sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' + | 1 = 2 = 3 stderr: +./calc.at:1485: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -184134,10 +176986,10 @@ Entering state 8 Reading a token Next token is token "number" (2) -syntax error +syntax error, unexpected number Error: popping nterm exp (1) Cleanup: discarding lookahead token "number" (2) -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -184147,53 +176999,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: cat stderr -input: - | 1//2 -./calc.at:1482: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 Reading a token -Next token is token '/' () -syntax error -Error: popping token '/' () +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () +Cleanup: discarding lookahead token '=' () +1.1: syntax error ./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -184204,26 +177041,89 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: cat stderr -input: - | error -./calc.at:1482: $PREPARSER ./calc input +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: cat stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token -Next token is token "invalid token" () -syntax error -Cleanup: discarding lookahead token "invalid token" () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token "invalid token" () -syntax error -Cleanup: discarding lookahead token "invalid token" () -./calc.at:1482: "$PERL" -pi -e 'use strict; +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1482: cat stderr +input: + | + | +1 +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1476: cat stderr +./calc.at:1486: cat stderr +./calc.at:1469: cat stderr +./calc.at:1477: cat stderr +input: +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -184233,11 +177133,33 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: cat stderr -input: +stderr: | 1 = 2 = 3 +stderr: ./calc.at:1482: $PREPARSER ./calc input +syntax error, unexpected '+' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +input: +./calc.at:1489: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +input: +input: +./calc.at:1485: cat stderr +stderr: stderr: + | (- *) + (1 2) = 1 + | 1 = 2 = 3 +syntax error, unexpected '+' Starting parse Entering state 0 Reading a token @@ -184267,20 +177189,15 @@ Error: popping token '=' () Error: popping nterm exp (1) Cleanup: discarding lookahead token '=' () + | (1 + #) = 1111 +./calc.at:1476: $PREPARSER ./calc input ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: $PREPARSER ./calc input +input: + | 1//2 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1486: $PREPARSER ./calc input stderr: -stdout: -./calc.at:1486: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - stderr: Starting parse Entering state 0 @@ -184311,7 +177228,16 @@ Error: popping token '=' () Error: popping nterm exp (1) Cleanup: discarding lookahead token '=' () +stderr: input: +syntax error: invalid character: '#' +stderr: +1.7: syntax error +stderr: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +error: 2222 != 1 | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -184325,7 +177251,82 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1486: $PREPARSER ./calc input +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | + | +1 +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +syntax error +error: 2222 != 1 +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -184336,8 +177337,71 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: cat stderr +1.7: syntax error +syntax error: invalid character: '#' +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: cat stderr +./calc.at:1480: cat stderr Starting parse Entering state 0 Reading a token @@ -185174,11 +178238,28 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | - | +1 -./calc.at:1482: $PREPARSER ./calc input +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: $PREPARSER ./calc /dev/null stderr: Starting parse Entering state 0 @@ -185656,82 +178737,2757 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 28 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 28 +Reading a token +Next token is token '-' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (4) +Shifting token "number" (4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) +-> $$ = nterm exp (4) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering 
state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (256) +Shifting token "number" (256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) +-> $$ = nterm exp (256) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 
(line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 12 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (64) +Shifting token "number" (64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) +-> $$ = nterm exp (64) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +input: +stderr: +input: +./calc.at:1485: cat stderr +syntax error, unexpected end of input +./calc.at:1476: cat stderr +./calc.at:1486: cat stderr + | + | +1 +./calc.at:1482: $PREPARSER ./calc input + | 1 2 +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: cat stderr +stderr: +./calc.at:1485: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1469: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +input: +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (* *) + (*) + (*) + | + | +1 +syntax error, unexpected end of input +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1477: $PREPARSER ./calc input +input: +stderr: +input: + | error +stderr: +./calc.at:1486: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file () +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) + | (# + 1) = 1111 +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1469: $PREPARSER ./calc input +stderr: +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" () +stderr: +stderr: +stderr: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file () +stderr: +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +2.1: syntax error +stderr: +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" () +stderr: +syntax error +syntax error +syntax error +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: cat stderr +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: cat stderr +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1//2 +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1485: cat stderr +./calc.at:1486: cat stderr +./calc.at:1477: cat stderr +./calc.at:1482: cat stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1477: $PREPARSER ./calc /dev/null +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +./calc.at:1469: cat stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1485: $PREPARSER ./calc input +stderr: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1486: $PREPARSER ./calc input +stderr: +1.1: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +stderr: +./calc.at:1482: $PREPARSER ./calc /dev/null +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +stderr: +./calc.at:1476: cat stderr +1.1: syntax error +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + # + 1) = 1111 +stderr: +input: +./calc.at:1469: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +stderr: +stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" () +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1476: $PREPARSER ./calc input +stderr: +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error: invalid character: '#' +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: cat stderr +./calc.at:1477: cat stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: cat stderr +input: +input: +./calc.at:1485: cat stderr +./calc.at:1486: cat stderr +stderr: +syntax error: invalid character: '#' + | 1 + 2 * 3 + !- ++ +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1477: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +input: +input: +input: +./calc.at:1480: $PREPARSER ./calc input +stderr: + | (!!) + (1 2) = 1 +./calc.at:1485: $PREPARSER ./calc input + | + | +1 +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1482: cat stderr +stderr: + | error +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error, unexpected number +error: 2222 != 1 +stderr: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' () + $2 = token '!' () +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 
+Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" () +syntax error, unexpected number +error: 2222 != 1 +input: +./calc.at:1469: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: cat stderr +input: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + 1) / (1 - 1) +./calc.at:1469: $PREPARSER ./calc input +./calc.at:1486: cat stderr +./calc.at:1477: cat stderr +./calc.at:1489: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm 
exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +stderr: +error: null divisor +./calc.at:1480: cat stderr +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1486: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1476: cat stderr +input: +stderr: + | (!!) 
+ (1 2) = 1 +./calc.at:1477: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1485: $PREPARSER ./calc input +error: null divisor + | 1 = 2 = 3 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () +Shifting token 
'+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +input: + | (- *) + (1 2) = 1 +stderr: +stderr: +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1489: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" () +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () + | (#) + (#) = 2222 +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +./calc.at:1476: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () 
+Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +1.11: syntax error +1.1-16: error: 2222 != 1 +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +stderr: +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1469: cat stderr +545. calc.at:1469: ok + +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: cat stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +569. 
calc.at:1494: testing Calculator C++ %glr-parser %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1494: mv calc.y.tmp calc.y + +./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +stdout: +./calc.at:1479: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1477: cat stderr +./calc.at:1486: cat stderr +./calc.at:1480: cat stderr +./calc.at:1485: cat stderr + | (!!) + (1 2) = 1 +./calc.at:1482: $PREPARSER ./calc input +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1479: $PREPARSER ./calc input +stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 28 +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 Reading a token -Next token is token "number" (4) -Shifting token "number" (4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) --> $$ = nterm exp (4) -Entering state 10 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -185739,275 +181495,215 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 28 +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () 
Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +input: +./calc.at:1476: cat stderr +input: +./calc.at:1489: cat stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (* *) + (*) + (*) + | (* *) + (*) + (*) +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1480: $PREPARSER ./calc input +stderr: +stderr: + | (- *) + (1 2) = 1 +./calc.at:1477: $PREPARSER ./calc input +stdout: +./calc.at:1480: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +stderr: +stderr: +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 +Next token is token '!' () +Shifting token '!' 
() +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () +Shifting token error () +Entering state 11 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (256) -Shifting token "number" (256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) --> $$ = nterm exp (256) -Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 +syntax error +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (4) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (4) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp 
(2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (64) -Shifting token "number" (64) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) --> $$ = nterm exp (64) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token @@ -186016,211 +181712,122 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -input: stderr: Starting parse Entering state 0 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token Next token is token '+' () -syntax error -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -stderr: -Starting parse -Entering state 0 +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -./calc.at:1482: cat stderr -./calc.at:1482: $PREPARSER ./calc /dev/null -input: - | 1//2 -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Now at end of input. 
-syntax error -Cleanup: discarding lookahead token "end of input" () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" () -stderr: -Starting parse -Entering state 0 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -./calc.at:1482: cat stderr -input: - | error -./calc.at:1486: $PREPARSER ./calc input -input: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" () - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" () -stderr: +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () Starting parse Entering state 0 Reading a token @@ -186229,7 +181836,7 @@ Entering state 4 Reading a token Next token is token ')' () -syntax error +syntax error, unexpected ')', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token ')' () @@ -186301,7 +181908,7 @@ Entering state 20 Reading a token Next token is token ')' () -syntax error +syntax error, unexpected ')', expecting number or '-' or '(' or '!' Error: popping token '+' () Error: popping nterm exp (3) Shifting token error () @@ -186332,7 +181939,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token '*' () @@ -186403,7 +182010,7 @@ Entering state 21 Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Error: popping token '*' () Error: popping nterm exp (2) Shifting token error () @@ -186468,8 +182075,42 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stdout: +./calc.at:1478: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +input: +input: +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | + | +1 +./calc.at:1489: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1476: $PREPARSER ./calc input + | 1 2 +./calc.at:1479: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 Starting parse Entering state 0 Reading a token @@ -186477,8 +182118,163 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token Next token is token ')' () -syntax error +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +input: +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1494: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +stderr: +stderr: +stderr: +stderr: +stderr: +stderr: +syntax error: invalid character: '#' +syntax error, unexpected number +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token ')' () @@ -186550,7 +182346,7 @@ Entering state 20 Reading a token Next token is token ')' () -syntax error +syntax error, unexpected ')', expecting number or '-' or '(' or '!' Error: popping token '+' () Error: popping nterm exp (3) Shifting token error () @@ -186581,7 +182377,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token '*' () @@ -186652,7 +182448,7 @@ Entering state 21 Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Error: popping token '*' () Error: popping nterm exp (2) Shifting token error () @@ -186717,7 +182513,54 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1486: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -186727,7 +182570,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1486: cat stderr +input: + | 1 2 +./calc.at:1478: $PREPARSER ./calc input +stderr: ./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -186738,74 +182584,139 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +syntax error, unexpected number input: - | 1 = 2 = 3 -./calc.at:1486: $PREPARSER ./calc input +1.3: syntax error + | 1 2 +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected number +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: cat stderr +./calc.at:1480: cat stderr +stderr: +1.3: syntax error +input: +./calc.at:1485: cat stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (* *) + (*) + (*) +syntax error, unexpected number +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1480: $PREPARSER ./calc input ./calc.at:1482: cat stderr +./calc.at:1486: cat stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (- *) + (1 2) = 1 +./calc.at:1489: cat stderr +./calc.at:1479: cat stderr +./calc.at:1482: $PREPARSER ./calc input stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +input: +input: +input: | (!!) 
+ (1 2) = 1 -./calc.at:1482: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () + | 1//2 +./calc.at:1486: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1489: $PREPARSER ./calc /dev/null stderr: Starting parse Entering state 0 @@ -186814,20 +182725,25 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () +Next token is token '*' () +syntax error +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Shifting token error () Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): @@ -186918,18 +182834,7 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -186977,7 +182882,7 @@ Entering state 12 Reading a token Next token is token "number" (2) -syntax error +syntax error, unexpected number Error: popping nterm exp (1) Shifting token error () Entering state 11 @@ -187041,64 +182946,82 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1486: cat stderr -input: - | - | +1 -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: Starting parse Entering state 0 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1482: cat stderr -Starting parse -Entering state 0 +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1486: "$PERL" -pi -e 'use strict; +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1478: cat stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: cat stderr +input: +./calc.at:1480: cat stderr +stderr: +stderr: +input: +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -187108,10 +183031,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
input: - | (- *) + (1 2) = 1 -./calc.at:1482: $PREPARSER ./calc input stderr: + | (# + 1) = 1111 +stderr: +./calc.at:1476: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -187119,25 +183044,20 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () Reading a token Next token is token ')' () -Entering state 11 -Next token is token ')' () Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): @@ -187164,7 +183084,7 @@ Entering state 12 Reading a token Next token is token "number" (2) -syntax error +syntax error, unexpected number Error: popping nterm exp (1) Shifting token error () Entering state 11 @@ -187228,9 +183148,7 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1486: cat stderr -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: + | 1//2 Starting parse Entering state 0 Reading a token @@ -187347,15 +183265,87 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1486: $PREPARSER ./calc /dev/null +./calc.at:1478: $PREPARSER ./calc input +input: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' 
() + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1480: $PREPARSER ./calc input stderr: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token Now at end of input. syntax error, unexpected end of input Cleanup: discarding lookahead token "end of input" () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -187363,6 +183353,101 @@ Now at end of input. syntax error, unexpected end of input Cleanup: discarding lookahead token "end of input" () + | 1 + 2 * 3 + !- ++ +./calc.at:1485: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' + | 1//2 +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +stderr: +stderr: +stderr: +stderr: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +1.3: syntax error +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 122): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +stderr: +syntax error: invalid character: '#' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -187373,6 +183458,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1477: cat stderr ./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -187383,15 +183469,166 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: cat stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 122): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1480: cat stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.3: syntax error ./calc.at:1486: cat stderr input: +./calc.at:1489: cat stderr input: - | (* *) + (*) + (*) + | (#) + (#) = 2222 +./calc.at:1480: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: cat stderr | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1482: $PREPARSER ./calc input +./calc.at:1489: $PREPARSER ./calc input + | (- *) + (1 2) = 1 ./calc.at:1486: $PREPARSER ./calc input stderr: +./calc.at:1485: cat stderr +stderr: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: cat stderr + | (* *) + (*) + (*) +./calc.at:1482: $PREPARSER ./calc input +stderr: +./calc.at:1479: cat stderr +stderr: Starting parse Entering state 0 Reading a token @@ -187399,16 +183636,22 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Shifting token error () Entering state 11 Next token is token '*' () Error: discarding token '*' () Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -187429,12 +183672,21 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () Entering state 11 @@ -187448,52 +183700,38 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -187508,8 +183746,6 @@ Cleanup: popping token "end of input" () Cleanup: popping nterm input () stderr: -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Reading a token @@ -187757,7 +183993,135 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () +input: +input: ./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 + | 1 + 2 * 3 + !- ++ +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1485: $PREPARSER ./calc input +stderr: +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +Starting parse +Entering state 0 +Reading a token 
+Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () Starting parse Entering state 0 Reading a token @@ -187873,6 +184237,14 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () +input: +./calc.at:1478: cat stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1476: cat stderr +./calc.at:1479: $PREPARSER ./calc input +input: +stderr: stderr: Starting parse Entering state 0 @@ -188121,7 +184493,331 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | error +./calc.at:1480: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2222) +Shifting token number (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +input: +stderr: +stderr: +stderr: + | error +./calc.at:1478: $PREPARSER ./calc input +syntax error, unexpected invalid token +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +syntax error, unexpected invalid token +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2222) +Shifting token number (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -188131,7 +184827,29 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: cat stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1476: $PREPARSER ./calc input +stderr: +syntax error, unexpected invalid token +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected invalid token +./calc.at:1477: cat stderr +stderr: +1.1: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -188142,81 +184860,263 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1480: cat stderr +stderr: +stderr: +1.1: syntax error +syntax error: invalid character: '#' +input: +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: ./calc.at:1486: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1482: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1477: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' + | (1 + #) = 1111 +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1489: cat stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (* *) + (*) + (*) +./calc.at:1486: $PREPARSER ./calc input +stderr: +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: cat stderr +stdout: stderr: +stderr: +syntax error: invalid character: '#' +./calc.at:1482: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +input: | (!!) + (1 2) = 1 -./calc.at:1486: $PREPARSER ./calc input +./calc.at:1479: cat stderr +./calc.at:1482: cat stderr +./calc.at:1489: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 +Reading a token Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) + $1 = nterm exp (1111) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1480: cat stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1478: cat stderr + | (1 + #) = 1111 +./calc.at:1485: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -188329,76 +185229,105 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: + | 1 = 2 = 3 +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1482: $PREPARSER ./calc input +stderr: +stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1482: $PREPARSER ./calc input +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +stderr: stderr: +syntax error, unexpected '=' Starting parse Entering state 0 Reading a token @@ -188406,20 +185335,19 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): @@ -188437,21 +185365,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -188465,38 +185384,52 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' () +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -188510,7 +185443,6 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -stderr: Starting parse Entering state 0 Reading a token @@ -188546,37 +185478,828 @@ -> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '+' () +Next token is token '=' () Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 -Next token is token '+' () +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) -> $$ = nterm exp (7) Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (7) +Shifting token "number" (7) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) +-> $$ = nterm exp (7) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token Next token is token '-' () Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
() +Entering state 2 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (5) +Shifting token "number" (5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) +-> $$ = nterm exp (5) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is 
token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm 
exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 28 +Reading a token +Next token is token '-' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (4) +Shifting token "number" (4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) +-> $$ = nterm exp (4) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by 
rule 4 (line 75): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp 
(3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (256) +Shifting token "number" (256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) +-> $$ = nterm exp (256) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 12 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (64) +Shifting token "number" (64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) +-> $$ = nterm exp (64) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 + | 1 = 2 = 3 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1480: $PREPARSER ./calc input +stderr: +stderr: stderr: Starting parse Entering state 0 @@ -188635,40 +186358,14 @@ Shifting token '!' () Entering state 5 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): $1 = token '!' () - $2 = token '-' () + $2 = token '+' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (- *) + (1 2) = 1 -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1482: cat stderr -stderr: Starting parse Entering state 0 Reading a token @@ -188676,25 +186373,20 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' 
() Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () Reading a token Next token is token ')' () -Entering state 11 -Next token is token ')' () Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): @@ -188785,50 +186477,86 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token "number" (7) +Shifting token "number" (7) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) +-> $$ = nterm exp (7) +Entering state 27 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -188836,604 +186564,747 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token -Next token is token "number" (2) -syntax error, unexpected number 
-Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 29 Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) + $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (5) +Shifting token "number" (5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) +-> $$ = nterm exp (5) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 27 +Entering state 10 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 
32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (-1) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -input: - | (#) + (#) = 2222 -./calc.at:1482: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (-1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '=' () +Reducing stack 0 by rule 
12 (line 103): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) -Entering state 27 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (-1) $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 
(line 75): - $1 = nterm exp (2222) + $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 28 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '-' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = 
nterm exp (3) +-> $$ = nterm exp (-4) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (4) +Shifting token "number" (4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) -Entering state 27 + $1 = token "number" (4) +-> $$ = nterm exp (4) +Entering state 10 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (-4) $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1486: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +-> $$ = nterm exp (-1) +Entering state 28 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 32 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token "number" (256) +Shifting token "number" (256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) +-> $$ = nterm exp (256) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -input: - | (1 + #) = 1111 -./calc.at:1482: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (4) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (4) Entering state 8 Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): 
+ $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) +Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token "number" (64) +Shifting token "number" (64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) + $1 = token "number" (64) +-> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) + $1 = nterm exp (64) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token @@ -189443,17 +187314,16 @@ Cleanup: popping token "end of input" () Cleanup: popping nterm input () ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1477: cat stderr +./calc.at:1476: cat stderr +stderr: stderr: +stderr: +stderr: +input: +1.7: syntax error +syntax error, unexpected '=' +syntax error, unexpected '=' Starting parse Entering state 0 Reading a token @@ -189461,11 +187331,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -189487,7 +187357,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -189498,16 +187368,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -189516,35 +187386,23 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a 
token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1486: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: $PREPARSER ./calc input + | 1 2 +./calc.at:1482: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -189611,8 +187469,54 @@ $2 = token '+' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: cat stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1480: cat stderr + | (1 + #) = 1111 +./calc.at:1477: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1476: $PREPARSER ./calc input +stderr: +input: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) + | 1 + 2 * 3 + !- ++ +stderr: +./calc.at:1482: $PREPARSER ./calc input +syntax error, unexpected '=' +stderr: +stderr: +input: +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: null divisor +1.7: syntax error + | (# + 1) = 1111 stderr: Starting parse Entering state 0 @@ -189671,21 +187575,81 @@ Shifting token '!' () Entering state 5 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): $1 = token '!' () - $2 = token '+' () + $2 = token '-' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1486: $PREPARSER ./calc input -input: - | (# + 1) = 1111 +./calc.at:1480: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' stderr: -./calc.at:1482: $PREPARSER ./calc input +stdout: +./calc.at:1485: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: cat stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) +stderr: +syntax error: invalid character: '#' +stderr: +error: null divisor +1.6: syntax error: invalid character: '#' +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1485: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -189751,80 +187715,403 @@ $2 = token '-' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 + !+ ++ +./calc.at:1486: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +stderr: stderr: Starting parse Entering state 0 Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (7) +Shifting token number (7) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (7) +-> $$ = nterm exp (7) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) +Entering state 8 
+Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (5) +Shifting token number (5) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (5) +-> $$ = nterm exp (5) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a 
token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (-1) Entering state 8 Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (1) +-> $$ = nterm exp (1) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1) + $2 = token '\n'Starting parse Entering state 0 Reading a token Next token is token "number" (1) @@ -189881,380 +188168,508 @@ Shifting token '!' () Entering state 5 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): $1 = token '!' 
() - $2 = token '-' () + $2 = token '+' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -stderr: -Starting parse -Entering state 0 + () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-1) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): 
+ $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: cat stderr -./calc.at:1486: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1482: $PREPARSER ./calc input -input: - | (#) + (#) = 2222 -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 28 Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '-' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '=' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) Entering state 8 -Reading a token Next token is token '=' () Shifting token '=' () Entering state 
18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (4) +Shifting token number (4) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (4) +-> $$ = nterm exp (4) +Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-4) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (-1) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 
+Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering s./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +tate 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) +Next token is token number (256) +Shifting token number (256) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (256) +-> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (256) $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $3 = nterm exp (256) +-> 
$$ = nterm exp (256) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -Starting parse -Entering state 0 -Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (4) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (4) Entering state 8 Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) +Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (64) +Shifting token number (64) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (64) +-> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (64) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm 
exp (64) +-> $$ = nterm exp (64) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: cat stderr +./calc.at:1480: cat stderr +./calc.at:1489: cat stderr +./calc.at:1479: cat stderr stderr: ./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -190266,512 +188681,950 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stdout: +stderr: +./calc.at:1487: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '=' () +Reducing 
stack 0 by rule 7 (line 103): + $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) +Next token is token number (7) +Shifting token number (7) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (7) +-> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (7) $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $3 = nterm exp (7) +-> $$ = nterm exp (7) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: cat stderr -./calc.at:1486: cat stderr -input: -input: - | (1 + #) = 1111 -./calc.at:1486: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) -./calc.at:1482: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 
105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (1) + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (5) +Shifting token number (5) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (5) +-> $$ = nterm exp (5) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () 
+Entering state 6 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 12 Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = nterm exp (2) + $2 = nterm exp (-1) $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 
+Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 28 +Reading a token +Next token is token '-' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (4) +Shifting token number (4) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (4) +-> $$ = nterm exp (4) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 115): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next 
token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 Reading a token Next token is token '-' () Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (2) $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) + $3 = nterm exp (3) +-> $$ = nterm exp (-1) Entering state 12 Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = nterm exp (0) + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +-> $$ = nterm exp (-1) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor + $2 = token '=' () + $3 = nterm exp (2) -> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -Starting parse -Entering state 0 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 8 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 -Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (256) +Shifting token number (256) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (256) +-> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (256) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (256) +-> $$ = nterm exp (256) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = nterm line () -> $$ = 
nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (4) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (4) Entering state 8 Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 116): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) +Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (64) +Shifting token number (64) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (64) +-> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (64) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 83): + $1 = nterm input () + $2 = 
nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () stderr: +./calc.at:1482: cat stderr +stderr: +stdout: +./calc.at:1491: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () + $1 = token "number" (2) -> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 +Entering state 29 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by 
rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1486: "$PERL" -pi -e 'use strict; +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +input: +input: +./calc.at:1478: cat stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -190791,12 +189644,70 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1486: cat stderr -input: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (- *) + (1 2) = 1 + | + | +1 | (# + 1) = 1111 +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1485: $PREPARSER ./calc input +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1487: $PREPARSER ./calc input +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1491: $PREPARSER ./calc input +input: +input: +input: + | + | +1 + | 1 2 + | (#) + (#) = 2222 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1485: $PREPARSER ./calc input +input: + | 1 + 2 * 3 + !- ++ ./calc.at:1486: $PREPARSER ./calc input -./calc.at:1482: cat stderr stderr: +stderr: +stderr: +syntax error, unexpected '+' Starting parse Entering state 0 Reading a token @@ -190814,15 +189725,15 @@ Next token is token '+' () Error: discarding token '+' () Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token number (1) +Error: discarding token number (1) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -190833,16 +189744,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) +Reducing stack 0 by 
rule 5 (line 92): + $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -190851,24 +189762,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -557. calc.at:1482: ok -stderr: Starting parse Entering state 0 Reading a token @@ -190876,18 +189784,21 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -190901,63 +189812,9 @@ -> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () - -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1486: $PREPARSER ./calc input -stderr: -stderr: -stdout: -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () @@ -190971,24 +189828,13 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () +Next token is token "number" (2) +syntax error, unexpected number Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () Entering state 11 @@ -191000,32 +189846,40 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -191039,697 +189893,848 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc calc.hh - stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.14-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = 
nterm input (1.1-2.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1479: $PREPARSER ./calc input -stderr: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1486: cat stderr -input: - | 1 2 -./calc.at:1479: $PREPARSER ./calc input -input: - | (1 + 1) / (1 - 1) -./calc.at:1486: $PREPARSER ./calc input -stderr: -syntax error, unexpected number -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 29 Reading a token -Next token is token ')' () +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Next token is token '=' (2.12: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 
(line 79): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token 
'(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = 
token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) Entering state 19 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) Entering state 28 Reading a token -Next token is token ')' () +Next token is token '-' (9.7: ) Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 Reading a token -Next token is token '\n' () -Reducing stack 0 by 
rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -syntax error, unexpected number -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 18 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 
70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 
2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 28 + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token "end of input" (14.1: ) Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -565. calc.at:1491: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1491: mv calc.y.tmp calc.y - -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1479: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1//2 -./calc.at:1479: $PREPARSER ./calc input -stderr: -./calc.at:1486: cat stderr -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -560. calc.at:1486: ok -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1479: cat stderr -input: - | error -./calc.at:1479: $PREPARSER ./calc input -stderr: -syntax error, unexpected invalid token -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected invalid token -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1479: $PREPARSER ./calc input -stderr: -syntax error, unexpected '=' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '=' -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -input: - | - | +1 -./calc.at:1479: $PREPARSER ./calc input -566. calc.at:1491: testing Calculator glr2.cc %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -stderr: -./calc.at:1491: mv calc.y.tmp calc.y - -syntax error, unexpected '+' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '+' -./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1491: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -./calc.at:1479: $PREPARSER ./calc /dev/null -stderr: -syntax error, unexpected end of input -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected end of input -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1479: $PREPARSER ./calc input -stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1479: $PREPARSER ./calc input -stderr: -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1491: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1479: $PREPARSER ./calc input -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1479: $PREPARSER ./calc input -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1479: $PREPARSER ./calc input -stderr: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1479: $PREPARSER ./calc input -stderr: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1479: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1479: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1485: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -syntax error: invalid character: '#' -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr input: - | (# + 1) = 1111 -./calc.at:1479: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -stderr: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: cat stderr Starting parse Entering state 0 Reading a token @@ -192078,7 +191083,8 @@ Entering state 24 Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1) - $2 = token '\n' () + $2 = token '\n'./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + () -> $$ = nterm line () Entering state 17 Reducing stack 0 by rule 2 (line 83): @@ -192566,8 +191572,1152 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1478: $PREPARSER ./calc input +stderr: +stderr: +stderr: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +syntax error, unexpected '+' +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp 
(1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Reading a token +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm 
input (1.1-4.0: ) +Entering state 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 103): + $1 = 
nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Reducing stack 0 by 
rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 
+Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 18 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp 
(12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token 
'\n' (13.13-14.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (14.1: ) +Entering state 16 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2222) +Shifting token "number" (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) stderr: +stderr: +2.1: syntax error Starting parse Entering state 0 Reading a token @@ -193404,278 +193554,64 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -input: - | 1 2 -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: cat stderr -stderr: +syntax error, unexpected '+' Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -input: - | (1 + # + 1) = 1111 -./calc.at:1479: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: cat stderr -stderr: syntax error: invalid character: '#' -input: - | 1//2 -./calc.at:1485: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 +Next token is token '+' () +Error: discarding token '+' () Reading a token Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1479: cat stderr -input: -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -./calc.at:1479: $PREPARSER ./calc input -stderr: -error: null divisor -./calc.at:1485: cat stderr -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -error: null divisor - | error -./calc.at:1485: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -./calc.at:1485: cat stderr -553. 
calc.at:1479: ok -input: - | 1 = 2 = 3 -./calc.at:1485: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 +Error: discarding token number (1) Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () - -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -input: - | - | +1 -./calc.at:1485: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token Next token is token '\n' () Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () -> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 82): @@ -193683,219 +193619,73 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +input: +./calc.at:1482: cat stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: cat stderr +./calc.at:1476: cat stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1480: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -567. calc.at:1492: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1492: mv calc.y.tmp calc.y - -./calc.at:1485: $PREPARSER ./calc /dev/null -./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 2 stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file () -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1485: $PREPARSER ./calc input +./calc.at:1491: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +stderr: +syntax error, unexpected '+' +547. calc.at:1476: ok +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) Entering state 8 +Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 @@ -193904,52 +193694,19 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' () -Error: popping nterm exp (2) +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -193957,264 +193714,184 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (3333) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (4444) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (2222) +Shifting token "number" (2222) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (4444) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (4444) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '!' 
() +Shifting token '!' () +Entering state 5 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +input: +input: +input: +2.1: syntax error + | (# + 1) = 1111 + | 1//2 +./calc.at:1477: $PREPARSER ./calc input + | 1 2 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1487: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +syntax error: invalid character: '#' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.2: syntax error: invalid character: '#' + +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Entering state 8 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '/' () +syntax error +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) @@ -194222,34 +193899,12 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () +Next token is token number (2) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) ./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -194260,236 +193915,136 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1485: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1485: $PREPARSER ./calc input +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error: invalid character: '#' +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +./calc.at:1486: cat stderr +./calc.at:1489: cat stderr Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 120): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +Starting parse +Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 -Next token is 
token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '/' () +syntax error +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +stderr: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: cat stderr +./calc.at:1479: cat stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 120): - $1 = token '!' () - $2 = token '!' 
() -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token Next token is token number (2) -syntax error, unexpected number +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: "$PERL" -pi -e 'use strict; +Cleanup: discarding lookahead token number (2) +./calc.at:1485: cat stderr +1.2: syntax error: invalid character: '#' +input: +./calc.at:1485: cat stderr +input: +./calc.at:1478: cat stderr +./calc.at:1479: $PREPARSER ./calc /dev/null +./calc.at:1480: $PREPARSER ./calc /dev/null + | (#) + (#) = 2222 +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -194499,10 +194054,44 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1485: cat stderr + | (* *) + (*) + (*) +./calc.at:1489: $PREPARSER ./calc input input: - | (- *) + (1 2) = 1 +./calc.at:1486: $PREPARSER ./calc input +stderr: + | (1 + # + 1) = 1111 +./calc.at:1478: $PREPARSER ./calc /dev/null +./calc.at:1491: cat stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1485: $PREPARSER ./calc input +input: +syntax error, unexpected end of input +./calc.at:1482: cat stderr +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | 1//2 +stderr: +syntax error, unexpected end of input +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -194511,28 +194100,19 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 119): - $1 = token '-' () - $2 = token error () +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -194547,28 +194127,19 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -194576,7 +194147,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -194586,42 +194157,61 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (2222) +Shifting token "number" (2222) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 + $3 = nterm exp (2222) -> $$ = nterm exp (2222) Entering state 8 Next 
token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: cat stderr +./calc.at:1480: cat stderr +stderr: +stderr: stderr: +1.1: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +syntax error, unexpected end of input Starting parse Entering state 0 Reading a token @@ -194629,42 +194219,6 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 119): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 @@ -194673,13 +194227,24 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token number (2) -syntax error, unexpected number +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) Reading a token Next token is token ')' () Entering state 11 @@ -194691,40 +194256,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 +Reading a token Next token is token 
'=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -194738,21 +194295,6 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1485: $PREPARSER ./calc input -stderr: Starting parse Entering state 0 Reading a token @@ -194775,7 +194317,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -194802,7 +194344,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -194810,7 +194352,7 @@ Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -194836,7 +194378,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -194844,7 +194386,7 @@ Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) @@ -194853,23 +194395,28 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +./calc.at:1491: $PREPARSER ./calc input +syntax error, unexpected end of input Starting parse Entering state 0 Reading a token @@ -194877,22 +194424,19 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -194907,100 +194451,69 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token "number" (2222) +Shifting token "number" (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2222) +-> $$ = nterm exp (2222) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (3333) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1492: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1485: $PREPARSER ./calc input -stderr: Starting parse Entering state 0 Reading a token @@ -195012,200 +194525,29 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): - $1 = token '!' 
() - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) input: - | 1 + 2 * 3 + !- ++ -./calc.at:1485: $PREPARSER ./calc input stderr: +./calc.at:1477: cat stderr +1.1: syntax error + | (1 + #) = 1111 Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token number (1) Shifting token number (1) @@ -195213,117 +194555,26 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 +Entering state 12 Reading a token Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1485: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token syntax error: invalid character: '#' Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () Error: discarding token error () Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Error: discarding token '+' () Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (1) +Error: discarding token number (1) Reading a token Next token is token ')' () Entering state 11 @@ -195335,39 +194586,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (2222) --> $$ = nterm exp (2222) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -195381,7 +194625,9 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1482: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -195390,19 +194636,22 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -195417,70 +194666,131 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (2222) -Shifting token number (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2222) --> $$ = nterm exp (2222) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1485: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () + | (1 + 1) / (1 - 1) +./calc.at:1482: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1480: $PREPARSER ./calc input +input: +input: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -195490,11 +194800,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1485: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1485: $PREPARSER ./calc input +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1//2 stderr: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + # + 1) = 1111 +./calc.at:1487: $PREPARSER ./calc input +stderr: +./calc.at:1477: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -195502,11 +194833,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -195528,7 +194859,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -195539,16 +194870,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -195557,29 +194888,63 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: null divisor +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1480: cat stderr stderr: +./calc.at:1479: cat stderr Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) + | error +stderr: +Starting parse +Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) @@ -195587,70 +194952,19 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () +Next token is token '/' () +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1482: $PREPARSER ./calc input +stderr: ./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -195661,11 +194975,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1485: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1485: $PREPARSER ./calc input stderr: +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./calc.at:1486: cat stderr Starting parse Entering state 0 Reading a token @@ -195673,25 +194986,33 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token syntax error: invalid character: '#' Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () Error: discarding token error () Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -195702,16 +195023,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -195720,92 +195041,57 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: null divisor +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +stderr: +./calc.at:1478: cat stderr ./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -195816,10 +195102,56 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error +Cleanup: discarding lookahead token "invalid token" () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1480: $PREPARSER ./calc input ./calc.at:1485: cat stderr +stderr: +stderr: + | (1 + #) = 1111 +./calc.at:1489: cat stderr +./calc.at:1486: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error +Cleanup: discarding lookahead token "invalid token" () + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1479: $PREPARSER ./calc input +stderr: +input: +./calc.at:1491: cat stderr +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1478: $PREPARSER ./calc input +input: input: - | (1 + # + 1) = 1111 -./calc.at:1485: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -195828,11 +195160,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -195849,18 +195181,12 @@ Next token is token error () Error: discarding token error () Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -195871,16 +195197,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -195889,23 +195215,98 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: $PREPARSER ./calc input stderr: +input: +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | 1 + 2 * 3 + !+ ++ +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +./calc.at:1485: cat stderr +stderr: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 + | error +./calc.at:1491: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: cat stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -195913,11 +195314,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -195934,18 +195335,12 @@ Next token is token error () Error: discarding token error () Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -195956,16 +195351,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -195974,36 +195369,22 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1485: $PREPARSER ./calc input -stderr: +./calc.at:1480: cat stderr Starting parse Entering state 0 Reading a token @@ -196121,6 +195502,119 @@ Cleanup: popping nterm input () ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +input: +input: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 + | error +./calc.at:1482: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1477: cat stderr +./calc.at:1487: $PREPARSER ./calc input +stderr: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +554. calc.at:1480: stderr: + ok +570. 
calc.at:1494: testing Calculator glr2.cc %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +stderr: Starting parse Entering state 0 Reading a token @@ -196236,53 +195730,15 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -559. calc.at:1485: ok - -568. calc.at:1492: testing Calculator glr2.cc %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1492: mv calc.y.tmp calc.y - -./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1482: cat stderr stderr: -stdout: -./calc.at:1489: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1492: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Cleanup: discarding lookahead token invalid token () Starting parse Entering state 0 Reading a token @@ -196318,44 +195774,149 @@ -> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '=' () +Next token is token '+' () Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 -Next token is token '=' () +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) -> $$ = nterm exp (7) Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1494: mv calc.y.tmp calc.y + + | (# + 1) = 1111 +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1480: cat stderr +input: +input: +./calc.at:1479: cat stderr + | 1 + 2 * 3 + !- ++ +stderr: +./calc.at:1489: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Cleanup: discarding lookahead token invalid token () +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + 1) / (1 - 1) +stderr: +./calc.at:1486: cat stderr +./calc.at:1477: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (7) -Shifting token "number" (7) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) --> $$ = nterm exp (7) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -196364,6 +195925,25 @@ -> $$ = nterm input () Entering state 6 Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: +stderr: +Starting parse +Entering state 0 +Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 @@ -196388,181 +195968,158 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token Next token is token "number" (3) Shifting token "number" (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): $1 = token "number" (3) -> $$ = nterm exp (3) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) Entering state 30 -Next token is token '=' () +Reading a token +Next token is token '+' () Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Next token is token '=' () +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token Next token is token '-' () Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (5) -Shifting token "number" (5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) --> $$ = nterm exp (5) -Entering state 10 +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 = 2 = 3 +1.11-17: error: null divisor +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: $PREPARSER ./calc input +stderr: +input: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) 
+ (1 2) = 1 +./calc.at:1480: $PREPARSER ./calc input + +stderr: +input: +Starting parse +Entering state 0 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '+' () +Error: discarding token '+' () Reading a token Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 +Error: discarding token "number" (1) Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 +Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting 
token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1478: cat stderr +stderr: +./calc.at:1491: cat stderr + | (# + 1) = 1111 +./calc.at:1486: $PREPARSER ./calc input +stderr: +stdout: +stderr: +1.11-17: error: null divisor +./calc.at:1485: cat stderr +Starting parse +Entering state 0 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -196570,27 +196127,11 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token "number" (2) Shifting token "number" (2) @@ -196598,72 +196139,63 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (2) -> $$ = nterm exp (2) -Entering state 32 +Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () 
-Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token Next token is token '-' () Shifting token '-' () -Entering state 2 +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1486: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +Starting parse +Entering state 0 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -196671,171 +196203,131 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token Next token is token "number" (2) Shifting token "number" (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): $1 = token "number" (2) -> $$ = nterm exp (2) -Entering state 28 +Entering state 27 Reading a token -Next token is token '-' () -Reducing stack 0 by 
rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '=' () +syntax error +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +stderr: +input: +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: cat stderr +stderr: + | (!!) + (1 2) = 1 +./calc.at:1479: $PREPARSER ./calc input +stderr: +syntax error, unexpected number +error: 2222 != 1 +Starting parse +Entering state 0 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 28 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (4) -Shifting token "number" (4) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) --> $$ = nterm exp (4) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 +Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +./calc.at:1478: $PREPARSER ./calc input +input: +stderr: +input: +stderr: + | 1 = 2 = 3 +./calc.at:1491: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 @@ -196844,13 +196336,9 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token Next token is token "number" (2) Shifting token "number" (2) @@ -196858,277 +196346,344 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (2) -> $$ = nterm exp (2) -Entering state 12 +Entering state 27 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '=' () +syntax error +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +stderr: +stderr: +syntax error, unexpected number +error: 2222 != 1 + | 1 = 2 = 3 +syntax error, unexpected number +error: 2222 != 1 +input: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 28 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) +-> $$ = nterm exp (1111) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) + $3 
= nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +558. calc.at:1485: ./calc.at:1485: cat stderr +stderr: +./calc.at:1487: $PREPARSER ./calc input + ok + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) -Entering state 32 +Entering state 27 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '=' () +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 18 Reading a token -Next token is token "number" (256) -Shifting token "number" (256) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) --> $$ = nterm exp (256) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +input: +./calc.at:1491: sed >&2 
-e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: cat stderr +Starting parse +Entering state 0 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 12 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) -Entering state 8 +Entering state 27 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '=' () +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: cat stderr +./calc.at:1477: cat stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 18 Reading a token -Next token is token "number" (64) -Shifting token "number" (64) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) --> $$ = nterm exp (64) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) + | 1 = 2 = 3 +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: cat stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: cat stderr + +input: + | (#) + (#) = 2222 +./calc.at:1489: $PREPARSER ./calc input +stderr: +input: +./calc.at:1482: cat stderr +549. 
calc.at:1477: ./calc.at:1486: cat stderr + ok +input: stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -197136,11 +196691,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -197148,11 +196703,11 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token @@ -197174,11 +196729,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (7) -Shifting token "number" (7) +Next token is token number (7) +Shifting token number (7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) + $1 = token number (7) -> $$ = nterm exp (7) Entering state 27 Reading a token @@ -197202,11 +196757,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -197214,11 +196769,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -197230,11 +196785,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 10 Reading a token @@ -197266,11 +196821,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (5) -Shifting token "number" (5) +Next token is token number (5) +Shifting token number (5) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) + $1 = token number (5) -> $$ = nterm exp (5) Entering state 10 Reading a token @@ -197318,11 +196873,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -197330,11 +196885,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 
Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -197359,11 +196914,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -197402,11 +196957,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -197430,11 +196985,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -197449,11 +197004,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token @@ -197503,11 +197058,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -197537,11 +197092,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -197585,11 +197140,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -197597,11 +197152,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 28 Reading a token @@ -197616,11 +197171,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token @@ -197639,11 +197194,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" 
(4) -Shifting token "number" (4) +Next token is token number (4) +Shifting token number (4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) + $1 = token number (4) -> $$ = nterm exp (4) Entering state 10 Reading a token @@ -197674,11 +197229,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -197690,11 +197245,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -197702,11 +197257,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token @@ -197738,11 +197293,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token @@ -197780,11 +197335,11 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 8 Reading a token @@ -197792,11 +197347,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -197804,11 +197359,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token @@ -197830,11 +197385,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (256) -Shifting token "number" (256) +Next token is token number (256) +Shifting token number (256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) + $1 = token number (256) -> $$ = nterm exp (256) Entering state 27 Reading a token @@ -197863,11 +197418,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 
Reading a token @@ -197875,11 +197430,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -197904,11 +197459,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token @@ -197923,11 +197478,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (64) -Shifting token "number" (64) +Next token is token number (64) +Shifting token number (64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) + $1 = token number (64) -> $$ = nterm exp (64) Entering state 27 Reading a token @@ -197953,104 +197508,17 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () -input: - | 1 2 -./calc.at:1489: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr -input: - | 1//2 -./calc.at:1489: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1489: "$PERL" -pi -e 'use strict; + | (1 + # + 1) = 1111 +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1487: cat stderr +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -198060,48 +197528,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1489: cat stderr input: - | error -./calc.at:1489: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" () -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr +./calc.at:1478: cat stderr input: - | 1 = 2 = 3 -./calc.at:1489: $PREPARSER ./calc input + | (- *) + (1 2) = 1 stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -198109,11 +197549,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token @@ -198123,16 +197563,16 @@ Error: popping token '=' () Error: popping nterm exp (1) Cleanup: discarding lookahead token '=' () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -198140,11 +197580,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token @@ -198154,353 +197594,11 @@ Error: popping token '=' () Error: popping nterm exp (1) Cleanup: discarding lookahead token '=' () -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr -input: - | - | +1 -./calc.at:1489: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr -./calc.at:1489: $PREPARSER ./calc /dev/null -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" () -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1489: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1479: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -198508,188 +197606,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' () -Error: popping nterm exp (2) +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -198701,96 +197623,6 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1489: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) Entering state 8 Reading a token Next token is token '+' () @@ -198801,21 +197633,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -198840,20 +197663,19 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (2222) +Shifting token "number" (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 + $3 = nterm exp (2222) -> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () @@ -198876,6 +197698,7 @@ Cleanup: popping nterm input () ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -198883,37 +197706,6 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 @@ -198922,262 +197714,24 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1489: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token error () +Error: discarding token error () Reading a token Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Error: discarding token '+' () Reading a token Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Error: discarding token "number" (1) Reading a token Next token is token ')' () Entering state 11 @@ -199189,40 +197743,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -199236,35 +197782,10 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -stderr: -stdout: -./calc.at:1486: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 + | (1 + # + 1) = 1111 +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1486: $PREPARSER ./calc input -stderr: +input: Starting parse Entering state 0 Reading a token @@ -200101,92 +198622,98 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1489: cat stderr +stderr: +./calc.at:1491: cat stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 + | + | +1 Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 +Reading a token Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) + $1 = nterm exp (1111) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token 
"number" (2222) +Shifting token "number" (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (7) --> $$ = nterm exp (7) + $1 = token "number" (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -200195,205 +198722,224 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1487: $PREPARSER ./calc input +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 + | 1 2 +stderr: +./calc.at:1486: $PREPARSER ./calc input +stderr: +stderr: + | (- *) + (1 2) = 1 +./calc.at:1478: $PREPARSER ./calc input + +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '+' () +Error: discarding token '+' () Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 10 +Next token is token "number" (1) +Error: discarding token "number" (1) Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) -Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 +Reading a token 
Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (5) --> $$ = nterm exp (5) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 +Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +Starting parse +Entering state 0 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 10 +Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = 
token ')' () +-> $$ = nterm exp (1111) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 +Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +input: +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +stderr: +Starting parse +Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) @@ -200401,100 +198947,226 @@ Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 10 +Entering state 8 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) + | + | +1 +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) 
Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token +Next token is token '+' () +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | + | +1 +./calc.at:1482: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token Next token is token '\n' () Shifting token '\n' () Entering state 3 Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '+' () +syntax error +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: cat stderr +Starting parse +Entering state 0 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +Starting parse +Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) @@ -200502,248 +199174,385 @@ Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 10 +Entering state 8 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: cat stderr +./calc.at:1480: cat stderr +stderr: +input: +stderr: +./calc.at:1487: cat stderr +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) + | (1 + #) = 1111 +./calc.at:1482: cat stderr +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1487: $PREPARSER ./calc /dev/null + | (* *) + (*) + (*) +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: cat stderr +input: +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1482: $PREPARSER ./calc input +stderr: +./calc.at:1478: cat stderr +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +571. calc.at:1504: testing Calculator lalr1.d ... +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 +Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - 
$2 = nterm line () --> $$ = nterm input () -Entering state 6 +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +Starting parse +Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 +Now at end of input. +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Cleanup: discarding lookahead token end of file () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1486: cat stderr +stderr: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 28 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4) --> $$ = nterm exp (4) -Entering state 10 -Reading a token -Next token 
is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 +Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +stderr: + | + | +1 +./calc.at:1485: $PREPARSER ./calc input +input: +./calc.at:1486: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token Next token is token '-' () Shifting token '-' () Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 28 Reading a token Next token is token ')' () Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) + $1 = nterm exp (1) $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) + $3 = nterm exp (1) +-> $$ = nterm exp (0) Entering state 12 Next token is token ')' () Shifting token ')' () Entering state 26 Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = nterm exp 
(-1) + $2 = nterm exp (0) $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 +-> $$ = nterm exp (0) +Entering state 31 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 10 (line 93): $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor -> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () @@ -200753,210 +199562,437 @@ $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () + | (* *) + (*) + (*) +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Cleanup: discarding lookahead token end of file () +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | (* *) + (*) + (*) +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1491: cat stderr +stderr: +input: +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' + | (1 + 1) / (1 - 1) +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+stderr: +stdout: +./calc.at:1486: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 8 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) -> $$ = nterm exp (2) -Entering state 32 +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (256) --> $$ = nterm exp (256) -Entering state 27 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) 
+Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) + $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: $PREPARSER ./calc /dev/null +./calc.at:1489: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1494: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1482: cat stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +1.2: syntax error +1.10: syntax error +1.16: syntax error +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: cat stderr +./calc.at:1489: cat stderr +Starting parse +Entering state 0 +Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) -> $$ = nterm exp (2) -Entering state 32 +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) Entering state 12 Next token is token ')' () Shifting token ')' () Entering state 26 Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = nterm exp (4) + $2 = nterm exp (0) $3 = token ')' () --> $$ = nterm exp (4) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (64) -Shifting token number (64) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (64) --> $$ = nterm exp (64) -Entering state 27 +-> $$ = nterm exp (0) +Entering state 31 Reading a token Next token is 
token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) + $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: - | 1 2 -./calc.at:1486: $PREPARSER ./calc input -input: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 stderr: - | (* *) + (*) + (*) ./calc.at:1489: $PREPARSER ./calc input +./calc.at:1482: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) Starting parse Entering state 0 Reading a token @@ -200968,11 +200004,28 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token number (2) -syntax error, unexpected number +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) +Cleanup: discarding lookahead token '/' () stderr: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1487: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: cat stderr Starting parse Entering state 0 Reading a token @@ -200980,101 +200033,101 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (2) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +-> $$ = nterm exp (2) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (0) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +-> $$ = nterm exp (0) +Entering state 31 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -201088,9 +200141,47 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (# + 1) = 1111 +stderr: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" () +572. calc.at:1509: testing Calculator D ... +input: +./calc.at:1482: cat stderr +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) stderr: +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -201098,42 +200189,18 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token error () +Error: discarding token error () Reading a token Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Error: discarding token '+' () Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token "number" (1) +Error: discarding token "number" (1) Reading a token Next token is token ')' () Entering state 11 @@ -201145,54 +200212,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) -> $$ = nterm exp (1111) -Entering state 29 +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () $3 = nterm exp (1111) --> $$ = nterm exp (3333) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -201206,23 +200251,10 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1489: "$PERL" -pi -e 'use strict; +./calc.at:1485: cat stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1509: 
COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +556. calc.at:1482: ./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -201232,336 +200264,258 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1489: cat stderr -input: | 1 + 2 * 3 + !+ ++ -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr +./calc.at:1480: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
() - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1//2 -./calc.at:1486: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 +Entering state 29 Reading a token Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token ')' () +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
() +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1489: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token '*' () -Shifting token '*' () -Entering state 21 +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 +Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) -Entering state 29 +Entering state 30 Reading a token Next token is token '*' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (3333) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok ./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -201572,7 +200526,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1489: "$PERL" -pi -e 'use strict; +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -201582,23 +200536,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1486: cat stderr -./calc.at:1489: cat stderr -input: - | (#) + (#) = 2222 -input: -./calc.at:1489: $PREPARSER ./calc input - | error -./calc.at:1486: $PREPARSER ./calc input +./calc.at:1485: $PREPARSER ./calc /dev/null stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr stderr: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -201606,19 +200568,14 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token Next token is token ')' () +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -201633,105 +200590,120 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token ')' () +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 
(line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token @@ -201739,19 +200711,52 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '*' () +Error: popping nterm exp (2) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -201759,102 +200764,54 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): 
- $1 = nterm exp (1111) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (3333) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (2222) +-> $$ = nterm exp (4444) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (4444) $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () stderr: -stdout: -./calc.at:1487: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1487: $PREPARSER ./calc input +stderr: ./calc.at:1486: cat stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1489: $PREPARSER ./calc input +./calc.at:1479: cat stderr stderr: Starting parse Entering state 0 @@ -201863,27 +200820,19 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token syntax error: invalid character: '#' Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () Error: discarding token error () Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -201933,15 +200882,13 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -201953,7 +200900,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 @@ -201965,20 +200912,20 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -201991,13 +200938,13 @@ Next token is token number (7) Shifting token number (7) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (7) -> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) @@ -202006,12 +200953,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 @@ -202019,7 +200966,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -202031,7 
+200978,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 @@ -202047,26 +200994,26 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (3) -> $$ = nterm exp (3) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering state 30 Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) @@ -202083,19 +201030,19 @@ Next token is token number (5) Shifting token number (5) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (5) -> $$ = nterm exp (5) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) @@ -202104,12 +201051,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -202118,11 +201065,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -202135,7 +201082,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 @@ -202147,20 +201094,20 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -202176,19 +201123,19 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next 
token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -202197,12 +201144,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -202219,13 +201166,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -202233,7 +201180,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -202247,13 +201194,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) @@ -202266,13 +201213,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) @@ -202281,12 +201228,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -202295,11 +201242,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -202320,25 +201267,25 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 
(line 102): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -202354,19 +201301,19 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -202375,12 +201322,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -202389,11 +201336,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -202402,7 +201349,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -202414,13 +201361,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 28 Reading a token Next token is token '-' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) @@ -202433,13 +201380,13 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) @@ -202456,19 +201403,19 @@ Next token is token number (4) Shifting token number (4) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (4) -> $$ = nterm exp (4) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) @@ -202477,12 +201424,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm 
line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -202491,7 +201438,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -202507,7 +201454,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 @@ -202519,13 +201466,13 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (2) $2 = token '-' () $3 = nterm exp (3) @@ -202534,7 +201481,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -202542,7 +201489,7 @@ Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (-1) @@ -202555,13 +201502,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) @@ -202570,12 +201517,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -202584,11 +201531,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -202597,7 +201544,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 8 @@ -202609,7 +201556,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 @@ -202621,20 +201568,20 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 
= nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) Entering state 32 Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) @@ -202647,13 +201594,13 @@ Next token is token number (256) Shifting token number (256) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (256) -> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (256) $2 = token '=' () $3 = nterm exp (256) @@ -202662,12 +201609,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -202680,7 +201627,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 @@ -202692,13 +201639,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) @@ -202707,7 +201654,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () @@ -202721,13 +201668,13 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (4) $2 = token '^' () $3 = nterm exp (3) @@ -202740,13 +201687,13 @@ Next token is token number (64) Shifting token number (64) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (64) -> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) @@ -202755,24 +201702,37 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () +Shifting token end of input () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1486: cat stderr +./calc.at:1491: cat stderr + | 1 + 2 * 3 + !- ++ + | 1 + 2 * 3 + !+ ++ +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1480: $PREPARSER ./calc input +input: stderr: +573. calc.at:1510: testing Calculator D %locations ... +./calc.at:1510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y + | 1 + 2 * 3 + !+ ++ +./calc.at:1479: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -202780,7 +201740,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -202792,7 +201752,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 @@ -202804,20 +201764,20 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -202830,13 +201790,13 @@ Next token is token number (7) Shifting token number (7) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (7) -> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) @@ -202845,12 +201805,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 @@ -202858,7 +201818,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -202870,7 +201830,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 @@ -202886,26 +201846,26 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (3) -> $$ = nterm exp (3) Entering state 10 Reading a token Next token is token 
'=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering state 30 Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) @@ -202922,19 +201882,19 @@ Next token is token number (5) Shifting token number (5) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (5) -> $$ = nterm exp (5) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) @@ -202943,12 +201903,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -202957,11 +201917,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -202974,7 +201934,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 @@ -202986,20 +201946,20 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -203015,19 +201975,19 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -203036,12 +201996,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 
4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203058,13 +202018,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -203072,7 +202032,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -203086,13 +202046,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) @@ -203105,13 +202065,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) @@ -203120,12 +202080,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203134,11 +202094,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203159,25 +202119,25 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -203193,19 +202153,19 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a 
token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -203214,12 +202174,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203228,11 +202188,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203241,7 +202201,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -203253,13 +202213,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 28 Reading a token Next token is token '-' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) @@ -203272,13 +202232,13 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) @@ -203295,19 +202255,19 @@ Next token is token number (4) Shifting token number (4) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (4) -> $$ = nterm exp (4) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) @@ -203316,12 +202276,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203330,7 +202290,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -203346,7 +202306,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 
-Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 @@ -203358,13 +202318,13 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (2) $2 = token '-' () $3 = nterm exp (3) @@ -203373,7 +202333,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -203381,7 +202341,7 @@ Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (-1) @@ -203394,13 +202354,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) @@ -203409,12 +202369,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203423,11 +202383,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203436,7 +202396,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 8 @@ -203448,7 +202408,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 @@ -203460,20 +202420,20 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) Entering state 32 Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) @@ -203486,13 +202446,13 @@ Next token is token number (256) Shifting token number (256) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (256) -> $$ = nterm exp (256) Entering state 
27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (256) $2 = token '=' () $3 = nterm exp (256) @@ -203501,12 +202461,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203519,7 +202479,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 @@ -203531,13 +202491,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) @@ -203546,7 +202506,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () @@ -203560,13 +202520,13 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (4) $2 = token '^' () $3 = nterm exp (3) @@ -203579,13 +202539,13 @@ Next token is token number (64) Shifting token number (64) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (64) -> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) @@ -203594,219 +202554,49 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () -input: -input: - | 1 2 - | 1 = 2 = 3 -./calc.at:1487: $PREPARSER ./calc input -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) +./calc.at:1482: cat stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Now at end of input. +syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file () stderr: +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +560. calc.at:1486: ok stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1487: cat stderr -./calc.at:1486: cat stderr input: - | 1//2 -./calc.at:1487: $PREPARSER ./calc input input: - | - | +1 -./calc.at:1486: $PREPARSER ./calc input +input: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1487: cat stderr + | 1 2 stderr: +input: + + | error +./calc.at:1489: $PREPARSER ./calc input ./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -203817,49 +202607,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +./calc.at:1486: $PREPARSER ./calc input +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 Starting parse Entering state 0 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: cat stderr +Now at end of input. 
+syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file () +stderr: +./calc.at:1482: $PREPARSER ./calc input +input: stderr: Starting parse Entering state 0 @@ -203867,712 +202628,328 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '/' () +Next token is token number (2) +syntax error, unexpected number Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -input: - | (# + 1) = 1111 -./calc.at:1489: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () +Cleanup: discarding lookahead token number (2) ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1487: cat stderr -./calc.at:1486: cat stderr -./calc.at:1486: $PREPARSER ./calc /dev/null input: -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - | error -./calc.at:1487: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () -./calc.at:1489: cat stderr -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token Next token is token invalid token () -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +syntax error, unexpected invalid token Cleanup: discarding lookahead token invalid token () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () -input: + | 1 + 2 * 3 + !- ++ +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (1 + # + 1) = 1111 -./calc.at:1489: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -Cleanup: discarding lookahead token invalid token () stderr: -Starting parse +input: +571. calc.at:1504: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: cat stderr -./calc.at:1487: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1486: $PREPARSER ./calc input +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) stderr: -input: - | 1 = 2 = 3 +stdout: +./calc.at:1479: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +./calc.at:1489: cat stderr + | (!!) + (1 2) = 1 + ./calc.at:1487: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: + skipped (calc.at:1504) Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error () +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '+' (1.13: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) Entering state 12 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '+' (1.17: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) Entering state 12 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) Entering state 20 Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '+' (1.20: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) Reading a token -Next token is token ')' () +Next token is token ')' (1.28: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '+' (1.30: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) Entering state 21 Reading a token -Next token is token number (2) -Shifting token number (2) +Next 
token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) Entering state 30 Reading a token -Next token is token '*' () +Next token is token '*' (1.39: ) Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) Entering state 21 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) Reading a token -Next token is token ')' () +Next token is token ')' (1.42: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 29 Reading a token -Next token is token '=' () +Next token is token '=' (1.44: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -204581,7 +202958,7 @@ Entering state 4 Reading a token Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error Shifting token error () Entering state 11 Next token is token ')' () @@ -204602,11 +202979,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -204614,11 +202991,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 Reading a token @@ -204633,11 +203010,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 Reading a token @@ -204653,7 +203030,7 @@ Entering state 20 Reading a token Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error Error: popping token '+' () Error: popping nterm exp (3) Shifting token error () @@ -204684,7 +203061,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error Shifting token error () Entering state 11 Next token is token '*' () @@ -204723,11 +203100,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -204735,11 +203112,11 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 30 Reading a token @@ -204755,7 +203132,7 @@ Entering state 21 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error Error: popping token '*' () Error: popping nterm exp (2) Shifting token error () @@ -204786,11 +203163,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token @@ -204816,52 +203193,11 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -204871,176 +203207,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1487: cat stderr -input: -input: -./calc.at:1486: cat stderr - | - | +1 -./calc.at:1487: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) -./calc.at:1489: $PREPARSER ./calc input -stderr: Starting parse Entering state 0 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () stderr: - | (!!) + (1 2) = 1 -./calc.at:1486: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering 
state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: Starting parse @@ -205057,7 +203232,7 @@ Next token is token '!' () Shifting token '!' () Entering state 15 -Reducing stack 0 by rule 16 (line 107): +Reducing stack 0 by rule 16 (line 120): $1 = token '!' () $2 = token '!' () Shifting token error () @@ -205066,7 +203241,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -205084,13 +203259,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token Next token is token number (2) -syntax error, unexpected number +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) Error: popping nterm exp (1) Shifting token error () Entering state 11 @@ -205102,7 +203277,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -205110,7 +203285,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -205123,13 +203298,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) @@ -205139,781 +203314,291 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () 
-Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1487: cat stderr -./calc.at:1487: $PREPARSER ./calc /dev/null -stderr: +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '!' () -Shifting token '!' 
() -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () -Shifting token error () +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -Cleanup: discarding lookahead token end of file () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -Cleanup: discarding lookahead token end of file () -./calc.at:1489: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -./calc.at:1487: cat stderr -563. calc.at:1489: ok -input: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - | (- *) + (1 2) = 1 -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1487: $PREPARSER ./calc input - -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) Entering state 12 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) Entering state 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp 
(1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) Entering state 12 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) Entering state 20 Reading a token -Next token is token ')' () -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) Entering state 4 Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) Reading a token -Next token is token ')' () +Next token is token ')' (1.28: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) Entering state 21 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) Entering state 30 Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) Entering state 21 Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = 
token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) Reading a token -Next token is token ')' () +Next token is token ')' (1.42: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 29 Reading a token -Next token is token '=' () +Next token is token '=' (1.44: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1485: cat stderr +input: +input: + +./calc.at:1480: cat stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1479: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1489: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -205923,141 +203608,20 @@ Entering state 4 Reading a token Next token is token ')' () -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '+' () -Error: popping nterm exp (3) +syntax error Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () 
-Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token @@ -206065,252 +203629,61 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1487: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: cat stderr -input: - | (!!) 
+ (1 2) = 1 -./calc.at:1487: $PREPARSER ./calc input -input: - | (* *) + (*) + (*) -./calc.at:1486: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 120): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -Starting parse -Entering state 0 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token ')' () +syntax error +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () @@ -206320,9 +203693,16 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token @@ -206331,12 +203711,18 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error Shifting token error () Entering state 11 Next token is token '*' () Error: discarding token '*' () Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -206351,10 +203737,10 @@ Reading a token Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (2222) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '+' () Shifting token '+' () @@ -206364,8 +203750,41 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error +Error: popping token '*' () +Error: popping nterm exp (2) Shifting token error () Entering state 11 Next token is token '*' () @@ -206383,18 +203802,38 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '\n' () +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) + $1 = nterm exp (3333) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (3333) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -206404,14 +203843,20 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -206523,6 +203968,36 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () +input: +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1485: $PREPARSER ./calc input +stderr: +stderr: +input: +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (#) + (#) = 2222 +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: cat stderr Starting parse Entering state 0 Reading a token @@ -206530,42 +204005,32 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) Reading a token Next token is token ')' () Entering state 11 @@ -206577,54 +204042,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) -> $$ = nterm exp (1111) -Entering state 29 +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () $3 = nterm exp (1111) --> $$ = nterm exp (3333) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -206634,12 +204077,22 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1487: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -206649,11 +204102,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (- *) + (1 2) = 1 -./calc.at:1487: $PREPARSER ./calc input ./calc.at:1486: cat stderr stderr: +stderr: +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: cat stderr +stderr: Starting parse Entering state 0 Reading a token @@ -206661,189 +204129,83 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 119): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp 
(2222) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1486: $PREPARSER ./calc input -569. calc.at:1494: testing Calculator C++ %glr-parser %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -stderr: -stderr: -./calc.at:1494: mv calc.y.tmp calc.y - -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) +./calc.at:1491: cat stderr Starting parse Entering state 0 Reading a token @@ -206851,23 +204213,9 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 119): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () @@ -206895,298 +204243,53 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
() - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1487: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' 
() -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -input: - | (* *) + (*) + (*) -./calc.at:1487: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () @@ -207196,9 +204299,16 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token @@ -207207,12 +204317,18 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token '*' () Error: discarding token '*' () Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -207227,10 +204343,10 @@ Reading a token Next token is token '+' () Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (2222) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '+' () Shifting token '+' () @@ -207240,8 +204356,41 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' () +Error: popping nterm exp (2) Shifting token error () Entering state 11 Next token is token '*' () @@ -207259,18 +204408,38 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '\n' () +Next token is token '=' () Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) + $1 = nterm exp (3333) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (3333) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (3333) + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -207284,18 +204453,23 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1487: cat stderr +input: +input: +input: +input: + | 1//2 stderr: +./calc.at:1489: $PREPARSER ./calc input + | 1 2 +./calc.at:1479: $PREPARSER ./calc input + | 1 = 2 = 3 +input: + | (#) + (#) = 2222 Starting parse Entering state 0 Reading a token @@ -207303,17 +204477,9 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () @@ -207333,14 +204499,61 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () @@ -207368,12 +204581,18 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token '*' () Error: discarding token '*' () Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -207386,42 +204605,20 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '\n' () +Next token is token '+' () Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1486: cat stderr -./calc.at:1487: cat stderr -input: - | (#) + (#) = 2222 -input: -./calc.at:1486: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1487: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token number (1) Shifting token number (1) @@ -207429,11 +204626,11 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token Next token is token number (2) Shifting token number (2) @@ -207441,99 +204638,34 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '+' () +Next token is token '*' () Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -stderr: -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' () +Error: popping nterm exp (2) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -207541,50 +204673,56 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (3333) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (2222) +-> $$ = nterm exp (4444) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (4444) $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1482: cat stderr +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1486: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1479: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -207592,234 +204730,76 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () stderr: -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2222) -Shifting token number (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () - | 1 + 2 * 3 + !- ++ -./calc.at:1487: $PREPARSER ./calc input stderr: + | (!!) 
+ (1 2) = 1 +./calc.at:1491: $PREPARSER ./calc input +syntax error, unexpected number +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Entering state 27 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +572. 
calc.at:1509: ./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -207829,6 +204809,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: +stderr: +stderr: +input: stderr: Starting parse Entering state 0 @@ -207836,340 +204825,310 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Entering state 27 Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () + skipped (calc.at:1509) +syntax error, unexpected number +Starting parse +Entering state 0 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token '!' () -Shifting token '!' () +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) Entering state 5 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1486: cat stderr -input: -./calc.at:1487: cat stderr - | (1 + #) = 1111 -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token -Next token is token ')' () +Next token is token ')' (1.12: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input 
(1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | (#) + (#) = 2222 -./calc.at:1487: $PREPARSER ./calc input +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1480: cat stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' + | (- *) + (1 2) = 1 + | (!!) + (1 2) = 1 +./calc.at:1489: cat stderr +./calc.at:1482: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1487: $PREPARSER ./calc input +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +573. calc.at:1510: ./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: stderr: + skipped (calc.at:1510) +./calc.at:1485: cat stderr Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 Reading a token -Next token is token ')' () +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token -Next token is token ')' () +Next token is token ')' (1.12: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 -Reducing stack 0 by 
rule 5 (line 92): - $1 = token number (2222) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +input: + | (1 + #) = 1111 +./calc.at:1480: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -208177,19 +205136,23 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' 
() Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () Reading a token Next token is token ')' () -Entering state 11 -Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -208204,19 +205167,28 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -208224,7 +205196,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -208234,45 +205206,44 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2222) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (2222) + $3 = nterm exp (1) +error: 2222 != 1 -> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1486: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1487: cat stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) stderr: +./calc.at:1489: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -208280,146 +205251,37 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): + $1 = token '-' () $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () - | (1 + #) = 1111 -./calc.at:1487: $PREPARSER ./calc input -./calc.at:1494: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -stderr: -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () @@ -208433,18 +205295,13 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () +Next token is token number (2) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 @@ -208456,53 +205313,18 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1111) - $2 = token '=' () + $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token Next token is token number (1) Shifting token number (1) @@ -208510,57 +205332,21 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -208574,6 +205360,19 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1478: cat stderr ./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -208584,15 +205383,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1486: cat stderr -./calc.at:1487: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1486: $PREPARSER ./calc input -input: - | (# + 1) = 1111 -./calc.at:1487: $PREPARSER ./calc input +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +syntax error: invalid character: '#' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -208600,35 +205414,19 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 @@ -208639,149 +205437,29 @@ -> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () +Next token is token "number" (2) +syntax error Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () Entering state 11 @@ -208793,32 +205471,40 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -208828,10 +205514,16 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () + +input: + | (!!) 
+ (1 2) = 1 +./calc.at:1485: $PREPARSER ./calc input +stderr: +./calc.at:1479: cat stderr stderr: Starting parse Entering state 0 @@ -208840,18 +205532,21 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): + $1 = token '-' () + $2 = token error () Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -208865,64 +205560,9 @@ -> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1487: cat stderr -./calc.at:1486: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1487: $PREPARSER ./calc input -input: - | (1 + 1) / (1 - 1) -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () @@ -208936,24 +205576,13 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () +Next token is token number (2) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 @@ -208965,32 +205594,40 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -209004,9 +205641,7 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +./calc.at:1479: cat stderr Starting parse Entering state 0 Reading a token @@ -209014,11 +205649,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -209026,11 +205661,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 Reading a token @@ 
-209059,11 +205694,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -209071,11 +205706,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 28 Reading a token @@ -209118,11 +205753,32 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: cat stderr +stderr: +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +input: +./calc.at:1489: cat stderr + | (1 + #) = 1111 +input: +./calc.at:1478: $PREPARSER ./calc input +stderr: +574. calc.at:1512: testing Calculator D parse.error=detailed api.prefix={calc} %verbose ... +input: Starting parse Entering state 0 Reading a token @@ -209130,6 +205786,37 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 @@ -209138,24 +205825,13 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () +Next token is token number (2) +syntax error, unexpected number Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 @@ -209167,32 +205843,40 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -209206,7 +205890,13 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1487: cat stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1512: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y + | 1//2 +./calc.at:1479: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1479: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 @@ -209215,11 +205905,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -209227,11 +205917,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) 
+Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 Reading a token @@ -209260,11 +205950,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -209272,11 +205962,11 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 28 Reading a token @@ -209319,14 +206009,19 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () +stderr: input: - | (1 + 1) / (1 - 1) -./calc.at:1487: $PREPARSER ./calc input + | error +./calc.at:1489: $PREPARSER ./calc input stderr: +stderr: +./calc.at:1491: cat stderr +input: +575. calc.at:1514: testing Calculator D %debug ... Starting parse Entering state 0 Reading a token @@ -209334,46 +206029,32 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 Reading a token Next token is token ')' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Entering state 25 +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () - $2 = nterm exp (2) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (2) +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () @@ -209387,126 +206068,36 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Entering state 25 +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () - $2 = nterm exp (0) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 106): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token ')' () +Next token is token '=' () Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) + $1 = nterm exp (1111) $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token Next token is token number (1) Shifting token number (1) @@ -209514,38 +206105,21 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 106): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -209559,7 +206133,23 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1486: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '/', expecting number or '-' or '(' or 
'!' +1.6: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1487: cat stderr +stderr: +syntax error: invalid character: '#' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -209569,1185 +206159,779 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1486: cat stderr -./calc.at:1487: cat stderr -561. calc.at:1486: ok -562. calc.at:1487: ok - - -stderr: -570. calc.at:1494: testing Calculator glr2.cc %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -stdout: -./calc.at:1491: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -./calc.at:1494: mv calc.y.tmp calc.y - -input: -./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1514: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1491: $PREPARSER ./calc input + | +1 +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -stdout: -./calc.at:1489: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -571. calc.at:1504: testing Calculator lalr1.d ... -./calc.at:1504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +stderr: +./calc.at:1486: $PREPARSER ./calc input input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1489: $PREPARSER ./calc input stderr: +syntax error: invalid character: '#' + | (- *) + (1 2) = 1 +stderr: +input: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1491: $PREPARSER ./calc input +576. calc.at:1516: testing Calculator D parse.error=custom ... 
Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +./calc.at:1480: cat stderr + | (* *) + (*) + (*) +Starting parse +Entering state 0 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.10: 3) --> $$ = 
nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1487: $PREPARSER ./calc input +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1516: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +stderr: +./calc.at:1482: cat stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) Entering state 2 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = 
token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 Reading a token -Next token is token '\n' (5.11-6.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering 
state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = 
token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: cat stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: + | (# + 1) = 1111 +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: $PREPARSER ./calc input +stderr: + | (- *) + (1 2) = 1 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering 
state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (14.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -stderr: +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (7) -Shifting token number (7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7) --> $$ = nterm exp (7) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = 
nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +stderr: +./calc.at:1489: cat stderr +./calc.at:1479: cat stderr +input: +stderr: +syntax error: invalid character: '#' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +Starting parse +Entering state 0 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Next token is token '*' () +syntax error +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) -Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (5) -Shifting token number (5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5) --> $$ = nterm exp (5) -Entering state 10 + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = 
token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 10 +Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) Entering state 27 +Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () 
-Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: cat stderr +./calc.at:1479: cat stderr + | (- *) + (1 2) = 1 +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: + | error +stderr: + | 1 = 2 = 3 +stderr: +./calc.at:1479: $PREPARSER ./calc input +stderr: +input: +Starting parse +Entering state 0 +Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 @@ -210756,188 +206940,245 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 +Next token is token '*' () +syntax error +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) 
+Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) + $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) --> $$ = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +563. calc.at:1489: ./calc.at:1489: $PREPARSER ./calc input + ok +syntax error: invalid character: '#' +input: +Starting parse +Entering state 0 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 10 +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) Entering state 27 +Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 
17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1491: cat stderr +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1486: cat stderr +./calc.at:1487: cat stderr + | (# + 1) = 1111 +./calc.at:1479: $PREPARSER ./calc input +stderr: +syntax error, unexpected invalid token +stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) @@ -210947,9 +207188,9 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token Next token is token number (2) Shifting token number (2) @@ -210957,182 +207198,155 @@ Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 28 -Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 +Entering state 27 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token '-' () Shifting token '-' () Entering state 2 Reading a token -Next token is token number (4) -Shifting token number (4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4) --> $$ = nterm exp (4) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) 
Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1486: $PREPARSER ./calc /dev/null +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +input: +syntax error, unexpected invalid token +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token Next token is token number (2) Shifting token number (2) @@ -211140,188 +207354,284 @@ Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () + | 1 + 2 * 3 + !+ ++ +syntax error: invalid character: '#' +./calc.at:1487: $PREPARSER ./calc input +stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +input: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: + | (* *) + (*) + (*) +./calc.at:1491: $PREPARSER ./calc input +syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 32 +Entering state 29 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (3) -> $$ = nterm exp (3) -Entering state 32 +Entering state 30 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) - $2 = token '^' () + $2 = token '*' () $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (256) -Shifting token number (256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (256) --> $$ = nterm exp (256) -Entering state 27 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Now at end of input. +syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +./calc.at:1480: cat stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 Reading a token -Next token is token number (64) -Shifting token number (64) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (64) --> $$ = nterm exp (64) -Entering state 27 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +577. calc.at:1517: testing Calculator D %locations parse.error=custom ... Starting parse Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -211333,7 +207643,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 @@ -211345,61 +207655,95 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) -> $$ = nterm exp (7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (7) -Shifting token number (7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7) --> $$ = nterm exp (7) -Entering state 27 +Next token is token '!' () +Shifting token '!' 
() +Entering state 5 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1517: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +./calc.at:1489: cat stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: cat stderr +574. calc.at:1512: ./calc.at:1482: cat stderr +input: +input: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + skipped (calc.at:1512) + | 1 + 2 * 3 + !- ++ +./calc.at:1487: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +578. calc.at:1518: testing Calculator D %locations parse.error=detailed ... 
+./calc.at:1480: $PREPARSER ./calc input +input: +./calc.at:1479: cat stderr +./calc.at:1479: cat stderr + | (* *) + (*) + (*) +input: + | (* *) + (*) + (*) +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1478: cat stderr +./calc.at:1518: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +./calc.at:1482: $PREPARSER ./calc input +input: +stderr: +stderr: +stderr: +Starting parse +Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -211411,7 +207755,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 @@ -211420,654 +207764,1034 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (3) -> $$ = nterm exp (3) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token Next token is token '-' () Shifting token '-' () -Entering state 2 +Entering state 13 +Reducing stack 0 by rule 18 (line 122): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 Reading a token -Next token is token number (5) -Shifting token number (5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5) --> $$ = nterm exp (5) -Entering state 10 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + | + | +1 +./calc.at:1489: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + + | 1 = 2 = 3 +575. calc.at:1514: stderr: +./calc.at:1479: $PREPARSER ./calc input +stderr: +stderr: +input: +input: +stderr: +Starting parse +Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 10 +Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 32 +Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token Next token is token '-' () Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Entering state 13 +Reducing stack 0 by rule 18 (line 122): + $1 = token '!' 
() + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm 
exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +Starting parse +Entering state 0 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +stderr: +Starting parse +Entering state 0 +Reading a token Next token is token '\n' () Shifting token '\n' () Entering state 3 Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (calc.at:1514) +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' + | (1 + # + 1) = 1111 + | (1 + # + 1) = 1111 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +576. calc.at:1516: ./calc.at:1479: $PREPARSER ./calc input +stderr: +stderr: +stderr: +syntax error, unexpected '=' +stderr: +stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (calc.at:1516) +Starting parse +Entering state 0 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 28 +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm 
exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (4) -Shifting token number (4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4) --> $$ = nterm exp (4) -Entering state 10 +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +1.6: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token Next token is token '\n' () Shifting token '\n' () Entering state 3 Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +stderr: +stderr: +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.6: syntax error: invalid character: '#' +syntax error, unexpected '=' +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1486: cat stderr + +./calc.at:1487: cat stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1480: cat stderr +syntax error: invalid character: '#' +input: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1491: cat stderr +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1489: cat stderr +stderr: +./calc.at:1485: cat stderr +input: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +syntax error: invalid character: '#' + | 1 + 2 * 3 + !+ ++ +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1489: $PREPARSER ./calc /dev/null + | (1 + 1) / (1 - 1) +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1479: cat stderr +./calc.at:1482: cat stderr + | (#) + (#) = 2222 +./calc.at:1487: $PREPARSER ./calc input +stderr: +stderr: +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 8 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (256) -Shifting token number (256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (256) --> $$ = nterm exp (256) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token Next token is token number (2) Shifting token number (2) @@ -212075,74 +208799,77 @@ Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 32 +Entering state 30 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () $3 = nterm exp (2) --> $$ = nterm exp (4) +-> $$ = nterm exp (2) Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', 
expecting number or '-' or '(' or '!' +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (4) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (4) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (64) --> $$ = nterm exp (64) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) + $1 = nterm exp (4444) $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token @@ -212151,6 +208878,18 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +input: +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ Starting parse Entering state 0 Reading a token @@ -212186,830 +208925,569 @@ -> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 29 -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 20 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1485: $PREPARSER ./calc input +error: null divisor +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | + | +1 +./calc.at:1478: cat stderr +./calc.at:1479: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - 
$1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Reading a token -Next token is token "number" (5.3: 1) 
-Shifting token "number" (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (input: -5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +Starting parse +Entering state 0 Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 +Now at end of input. +syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 
(line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering 
state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token -Next token is token '\n' (10.16-11.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: +stderr: +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 +Next token is token '!' () +Shifting token '!' 
() +Entering state 5 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: null divisor +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +syntax error, unexpected '+' +stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 +Next token is token ')' () +Entering state 11 +Next token is token 
')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" (14.1: ) +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) - | 1 2 -./calc.at:1489: $PREPARSER ./calc input -input: - | 1 2 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of file () +Cleanup: popping nterm input () stderr: stderr: Starting parse @@ -213023,42 +209501,138 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + | 1 + 2 * 3 + !+ ++ +./calc.at:1482: $PREPARSER ./calc input +input: Starting parse Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +input: +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1478: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +./calc.at:1491: $PREPARSER ./calc input +syntax error, unexpected '+' +stderr: +input: +stderr: ./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -213069,114 +209643,76 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1494: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr -571. 
calc.at:1504: ./calc.at:1491: cat stderr - skipped (calc.at:1504) -input: -input: - | 1//2 - | 1//2 -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1491: $PREPARSER ./calc input -stderr: - Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -stderr: -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -stderr: -Starting parse -Entering state 0 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) + | 1 + 2 * 3 + !- ++ +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -213186,7 +209722,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1489: "$PERL" -pi -e 'use strict; +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -213196,56 +209733,77 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +577. calc.at:1517: stderr: +./calc.at:1487: cat stderr ./calc.at:1489: cat stderr -./calc.at:1491: cat stderr -input: -input: - | error - | error -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1489: $PREPARSER ./calc input -stderr: stderr: + skipped (calc.at:1517) Starting parse Entering state 0 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Starting parse -Entering state 0 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -Starting parse -Entering state 0 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: cat stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 122): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -213255,13 +209813,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 = 2 = 3 -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1489: cat stderr -572. calc.at:1509: testing Calculator D ... -./calc.at:1509: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: +578. calc.at:1518: stderr: Starting parse Entering state 0 Reading a token @@ -213273,9 +209825,9 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 Reading a token Next token is token "number" (1.5: 2) Shifting token "number" (1.5: 2) @@ -213283,249 +209835,199 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 27 +Entering state 29 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm 
exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -input: - | 1 = 2 = 3 -./calc.at:1489: $PREPARSER ./calc input +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor stderr: -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) + $1 = token "number" (2) -> $$ = nterm exp (2) -Entering state 27 +Entering state 29 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: cat stderr +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' 
() + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1479: cat stderr + skipped (calc.at:1518) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: cat stderr +input: +579. calc.at:1519: testing Calculator D %locations parse.error=simple ... stderr: +./calc.at:1479: cat stderr +input: Starting parse Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -input: - | - | +1 -./calc.at:1491: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Entering state 29 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1489: cat stderr -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | - | +1 -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1491: cat stderr -stderr: -Starting parse -Entering state 0 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: $PREPARSER ./calc /dev/null -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -stderr: -Starting parse -Entering state 0 +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1489: "$PERL" -pi -e 'use strict; +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 122): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1519: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +input: + + | (1 + #) = 1111 + | 1 + 2 * 3 + !- ++ +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -213535,7 +210037,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1491: "$PERL" -pi -e 'use strict; +input: +./calc.at:1487: $PREPARSER ./calc input +stderr: +./calc.at:1479: $PREPARSER ./calc /dev/null +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -213545,537 +210051,381 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -572. 
calc.at:1509: ./calc.at:1489: cat stderr -./calc.at:1489: $PREPARSER ./calc /dev/null - skipped (calc.at:1509) -./calc.at:1491: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -./calc.at:1491: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () - -stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Reading a token -Next token is token '+' (1.20: ) +Next token is token '+' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 20 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1489: $PREPARSER ./calc input +stderr: +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1479: $PREPARSER ./calc input +stderr: +./calc.at:1486: cat stderr +Starting parse +Entering state 0 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) +Next token is token error () +Error: discarding token error () Reading a token -Next token is token ')' (1.42: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Reading a token +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token 
'=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected end of input +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +555. calc.at:1480: ok Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) + $1 = token "number" (2) +-> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Reading a token -Next token is token '+' (1.20: ) +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: null divisor +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +580. calc.at:1520: testing Calculator D parse.error=detailed %debug %verbose ... +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token ')' (1.28: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1478: cat stderr +input: +syntax error, unexpected end of input +./calc.at:1520: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +Starting parse +Entering state 0 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Reading a token -Next token is token '=' (1.44: ) +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = 
nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1489: cat stderr +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) ./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -214086,14 +210436,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1491: cat stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1489: $PREPARSER ./calc input -input: - | (!!) + (1 2) = 1 -./calc.at:1491: $PREPARSER ./calc input -stderr: +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -214341,121 +210693,26 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -stderr: ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +./calc.at:1486: $PREPARSER ./calc input stderr: +error: null divisor +551. calc.at:1478: + ok +./calc.at:1491: cat stderr +./calc.at:1485: cat stderr +stderr: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -214703,129 +210960,7 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: "$PERL" -pi -e 'use strict; +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -214835,17 +210970,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1489: cat stderr -input: -./calc.at:1491: cat stderr - | (!!) + (1 2) = 1 -./calc.at:1489: $PREPARSER ./calc input -input: - | (- *) + (1 2) = 1 -./calc.at:1491: $PREPARSER ./calc input +581. calc.at:1521: testing Calculator D parse.error=detailed %debug api.symbol.prefix={SYMB_} api.token.prefix={TOK_} %verbose ... stderr: -573. calc.at:1510: testing Calculator D %locations ... -./calc.at:1510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +./calc.at:1479: cat stderr +./calc.at:1521: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +582. calc.at:1523: testing Calculator D %locations parse.lac=full parse.error=detailed ... Starting parse Entering state 0 Reading a token @@ -214957,125 +211086,31 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + +./calc.at:1523: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1487: cat stderr + | (#) + (#) = 2222 +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1482: cat stderr + | (#) + (#) = 2222 +input: +./calc.at:1485: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1479: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -215187,150 +211222,127 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1479: cat stderr stderr: +stderr: +stderr: + | (#) + (#) = 2222 +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token error () +Error: discarding token error () Reading a token -Next token is token ')' (1.5: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token error () +Error: discarding token error () Reading a token -Next token is token ')' (1.13: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is 
token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: cat stderr -./calc.at:1489: cat stderr +Cleanup: popping token end of file () +Cleanup: popping nterm input () input: - | (* *) + (*) + (*) -./calc.at:1491: $PREPARSER ./calc input -stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -215338,102 +211350,84 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) Shifting token error (1.2: ) Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.5: ) +Next token is token ')' (1.3: ) Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.9: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 29 Reading a token -Next token is token '+' (1.13: ) +Next token is token '=' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -215446,127 +211440,116 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | (- *) + (1 2) = 1 -./calc.at:1489: $PREPARSER ./calc input +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1487: $PREPARSER ./calc input +./calc.at:1489: cat stderr ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token error () +Error: discarding token error () Reading a token -Next token is token ')' (1.5: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token error () +Error: discarding token error () Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 +Next token is token number (2222) +Shifting token number (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2222) +-> $$ = nterm exp (2222) +Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Cleanup: popping token end of file () +Cleanup: popping nterm input () Starting parse Entering state 0 Reading a token @@ -215574,21 +211557,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -215610,21 +211584,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -215649,20 +211614,19 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (2222) +Shifting token "number" (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 + $3 = nterm exp (2222) -> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () @@ -215679,40 +211643,131 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse +input: + | (!!) + (1 2) = 1 +552. calc.at:1479: Starting parse Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () + ok +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () @@ -215801,172 +211856,6 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: cat stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1489: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -573. 
calc.at:1510: | (* *) + (*) + (*) -./calc.at:1489: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - skipped (calc.at:1510) -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1491: $PREPARSER ./calc input -stderr: Starting parse Entering state 0 Reading a token @@ -215974,15 +211863,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -216004,12 +211890,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -216023,69 +211909,133 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2222) +Shifting token "number" (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () $3 = nterm exp (1111) --> $$ = nterm exp (3333) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -stderr: stderr: +./calc.at:1486: cat stderr Starting parse Entering state 0 Reading a token @@ -216093,19 +212043,20 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () Reading a token Next token is token ')' () -Entering state 11 -Next token is token ')' () Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): @@ -216123,12 +212074,21 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 @@ -216142,52 +212102,38 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -216201,299 +212147,8 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): 
- $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr -./calc.at:1491: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1489: $PREPARSER ./calc input -input: -stderr: - | (#) + (#) = 2222 -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
() - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) +./calc.at:1479: cat stderr +./calc.at:1485: cat stderr stderr: Starting parse Entering state 0 @@ -216593,268 +212248,665 @@ Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) input: - | 1 + 2 * 3 + !- ++ -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: $PREPARSER ./calc input +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +579. calc.at:1519: skipped (calc.at:1519) + | (!!) + (1 2) = 1 +./calc.at:1479: $PREPARSER ./calc input +583. calc.at:1524: testing Calculator D %locations parse.lac=full parse.error=custom ... +./calc.at:1524: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y + +585. calc.at:1530: testing Calculator D api.token.constructor %locations parse.error=custom api.value.type=union ... +./calc.at:1530: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +584. calc.at:1525: testing Calculator D %locations parse.lac=full parse.error=detailed parse.trace ... +./calc.at:1525: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +586. calc.at:1531: testing Calculator D api.token.constructor %locations parse.error=detailed ... +./calc.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +./calc.at:1487: cat stderr +input: +input: + | (- *) + (1 2) = 1 +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + #) = 1111 +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1491: cat stderr +./calc.at:1486: $PREPARSER ./calc input +580. calc.at:1520: stderr: + skipped (calc.at:1520) +syntax error, unexpected number +error: 2222 != 1 + +587. calc.at:1532: testing Calculator D api.push-pull=both ... +./calc.at:1532: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +input: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +581. calc.at:1521: | (1 + # + 1) = 1111 +./calc.at:1487: $PREPARSER ./calc input +./calc.at:1482: cat stderr +582. calc.at:1523: skipped (calc.at:1521) +stderr: +583. 
calc.at:1524: Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +stderr: + skipped (calc.at:1523) stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.3: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) +Next token is token number (2) +Error: discarding token number (2) Reading a token -Next token is token ')' (1.9: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = 
nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1489: cat stderr stderr: -Starting parse +584. calc.at:1525: Starting parse Entering state 0 Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +syntax error, unexpected number +error: 2222 != 1 + skipped (calc.at:1524) + skipped (calc.at:1525) + + + + +588. calc.at:1533: testing Calculator D parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full ... +./calc.at:1533: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +589. calc.at:1544: testing Calculator Java ... 
+./calc.at:1544: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +input: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + #) = 1111 +./calc.at:1491: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1482: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token Next token is token '*' () -Shifting token '*' () -Entering state 21 +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Next token is token '+' () +Reading a token +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) + $1 = nterm exp (1111) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () Starting parse Entering state 0 Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +591. 
calc.at:1546: testing Calculator Java parse.error=detailed ... +./calc.at:1546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +593. calc.at:1548: testing Calculator Java %locations parse.error=custom ... +./calc.at:1548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +input: +588. calc.at:1533: 589. calc.at:1544: 585. calc.at:1530: | (- *) + (1 2) = 1 +./calc.at:1489: $PREPARSER ./calc input + skipped (calc.at:1544) +587. calc.at:1532: stderr: +586. calc.at:1531: stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: cat stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (1 + #) = 1111 -./calc.at:1489: cat stderr -./calc.at:1491: $PREPARSER ./calc input -stderr: -input: - | (#) + (#) = 2222 +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () + skipped (calc.at:1530) + skipped (calc.at:1533) + skipped (calc.at:1532) + skipped (calc.at:1531) Starting parse Entering state 0 Reading a token @@ -216932,8 +212984,6 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: Starting parse @@ -216943,12 +212993,242 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next 
token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () + +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +590. calc.at:1545: testing Calculator Java parse.error=custom ... +./calc.at:1545: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y + + + +592. 
calc.at:1547: testing Calculator Java parse.error=verbose ... +./calc.at:1547: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y + +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +591. calc.at:1546: ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (calc.at:1546) +stderr: +stderr: +596. calc.at:1551: testing Calculator Java parse.trace parse.error=verbose ... +./calc.at:1551: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +598. calc.at:1554: testing Calculator Java api.push-pull=both ... +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -216970,12 +213250,21 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 @@ -217000,19 +213289,20 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (2222) + $3 = nterm exp (1) +error: 2222 != 1 -> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () @@ -217033,7 +213323,91 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +592. calc.at:1547: 597. calc.at:1552: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is} ... 
+ +./calc.at:1554: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +595. calc.at:1550: testing Calculator Java %locations parse.error=verbose ... + skipped (calc.at:1547) +./calc.at:1550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +593. calc.at:1548: Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () + skipped (calc.at:1548) +stderr: Starting parse Entering state 0 Reading a token @@ -217111,7 +213485,78 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1552: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y + + +594. calc.at:1549: testing Calculator Java %locations parse.error=detailed ... +./calc.at:1549: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: cat stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1487: cat stderr +590. calc.at:1545: skipped (calc.at:1545) +input: + | (1 + 1) / (1 - 1) +./calc.at:1487: $PREPARSER ./calc input + +599. calc.at:1555: testing Calculator Java api.push-pull=both parse.error=detailed %locations ... +input: +./calc.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +./calc.at:1485: cat stderr +595. calc.at:1550: 596. calc.at:1551: ./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +598. calc.at:1554: ./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + skipped (calc.at:1551) + skipped (calc.at:1550) stderr: +./calc.at:1486: cat stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + skipped (calc.at:1554) Starting parse Entering state 0 Reading a token @@ -217119,28 +213564,128 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () - $2 = token error () + $2 = nterm exp (2) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading 
a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 106): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +601. calc.at:1557: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is} api.push-pull=both ... + | (# + 1) = 1111 +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1557: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +./calc.at:1491: cat stderr +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: + + | (- *) + (1 2) = 1 +./calc.at:1479: $PREPARSER ./calc input + +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () @@ -217153,92 +213698,186 @@ Next token is token error () Error: discarding token error () Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp 
(2222) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -574. calc.at:1512: testing Calculator D parse.error=detailed api.prefix={calc} %verbose ... -./calc.at:1512: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: cat stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1489: cat stderr input: | (# + 1) = 1111 ./calc.at:1491: $PREPARSER ./calc input +./calc.at:1482: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing 
stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 106): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () + input: - | (1 + #) = 1111 -./calc.at:1489: $PREPARSER ./calc input +stderr: + | (* *) + (*) + (*) stderr: Starting parse Entering state 0 @@ -217310,6 +213949,8 @@ Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: +input: +./calc.at:1489: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -217317,33 +213958,25 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token syntax error: invalid character: '#' Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () Error: discarding token error () Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -217357,13 +213990,13 @@ Next token is token number (1111) Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -217372,24 +214005,49 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +input: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected number +error: 2222 != 1 +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: $PREPARSER ./calc input + | (* *) + (*) + (*) +./calc.at:1486: $PREPARSER ./calc input +stderr: +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 stderr: +stdout: +./calc.at:1492: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + Starting parse Entering state 0 Reading a token @@ -217460,6 +214118,7 @@ Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -217467,27 +214126,19 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token syntax error: invalid character: '#' Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () Error: discarding token error () Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -217504,11 +214155,11 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) + $1 = token "number" (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token @@ -217533,227 +214184,89 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: cat stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1489: cat stderr - | (1 + # + 1) = 1111 -./calc.at:1491: $PREPARSER ./calc input stderr: +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | (# + 1) = 1111 -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -217765,32 +214278,20 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -217804,18 +214305,6 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -217823,18 +214312,42 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token Next token is token '+' () -Error: discarding token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -217846,32 +214359,54 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 27 +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -217885,12 +214420,13 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -574. calc.at:1512: skipped (calc.at:1512) -./calc.at:1491: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1489: "$PERL" -pi -e 'use strict; +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +597. 
calc.at:1552: input: +./calc.at:1487: cat stderr +stderr: +stderr: +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -217900,278 +214436,99 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -./calc.at:1489: cat stderr stderr: -Starting parse + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1492: $PREPARSER ./calc input + skipped (calc.at:1552) +594. calc.at:1549: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1489: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -218183,32 +214540,20 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -218222,8 +214567,6 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Reading a token @@ -218231,32 +214574,15 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -218270,173 +214596,79 @@ -> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: cat stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -565. calc.at:1491: ok -./calc.at:1489: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1489: $PREPARSER ./calc input - -stderr: -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (2) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (0) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -218450,8 +214682,19 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + skipped (calc.at:1549) +600. calc.at:1556: testing Calculator Java parse.trace parse.error=custom %locations %lex-param {InputStream is} api.push-pull=both ... 
+./calc.at:1491: cat stderr Starting parse Entering state 0 Reading a token @@ -218459,101 +214702,55 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token '+' () -Shifting token '+' () -Entering state 20 +Error: discarding token '+' () Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 +Next token is token "number" (1) +Error: discarding token "number" (1) Reading a token Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (2) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (2) +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -218563,11 +214760,11 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1489: "$PERL" -pi -e 'use strict; +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -218577,58 +214774,53 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -575. calc.at:1514: testing Calculator D %debug ... -./calc.at:1514: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1489: cat stderr -564. calc.at:1489: ok - -576. calc.at:1516: testing Calculator D parse.error=custom ... -./calc.at:1516: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -575. calc.at:1514: skipped (calc.at:1514) +./calc.at:1556: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +562. calc.at:1487: ok -577. calc.at:1517: testing Calculator D %locations parse.error=custom ... -./calc.at:1517: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -576. calc.at:1516: skipped (calc.at:1516) +./calc.at:1479: cat stderr -578. calc.at:1518: testing Calculator D %locations parse.error=detailed ... -./calc.at:1518: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -577. calc.at:1517: skipped (calc.at:1517) +input: +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (* *) + (*) + (*) +./calc.at:1479: $PREPARSER ./calc input -579. calc.at:1519: testing Calculator D %locations parse.error=simple ... -./calc.at:1519: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -578. calc.at:1518: skipped (calc.at:1518) stderr: -stdout: -./calc.at:1492: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc calc.hh - - +./calc.at:1489: cat stderr input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1492: $PREPARSER ./calc input -580. calc.at:1520: testing Calculator D parse.error=detailed %debug %verbose ... -./calc.at:1520: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: +./calc.at:1486: cat stderr Starting parse Entering state 0 Reading a token @@ -219465,13 +215657,189 @@ Entering state 16 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -579. calc.at:1519: skipped (calc.at:1519) + | (1 + # + 1) = 1111 +./calc.at:1485: $PREPARSER ./calc input +stderr: ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - stderr: Starting parse Entering state 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: +input: +input: +./calc.at:1482: cat stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 @@ -219768,7 +216136,8 @@ Shifting token ')' (5.4: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) + $1 = token '(' ( | 1 + 2 * 3 + !+ ++ +5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) -> $$ = nterm exp (5.1-4: -1) @@ -220305,943 +216674,258 @@ Entering state 16 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -input: - | 1 2 -./calc.at:1492: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: cat stderr -580. calc.at:1520: input: - skipped (calc.at:1520) - | 1//2 -./calc.at:1492: $PREPARSER ./calc input -581. calc.at:1521: testing Calculator D parse.error=detailed %debug api.symbol.prefix={SYMB_} api.token.prefix={TOK_} %verbose ... -./calc.at:1521: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) - -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: cat stderr -input: - | error -./calc.at:1492: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -582. calc.at:1523: testing Calculator D %locations parse.lac=full parse.error=detailed ... -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1523: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1492: cat stderr -581. 
calc.at:1521: input: - skipped (calc.at:1521) - | 1 = 2 = 3 -./calc.at:1492: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) - -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -583. calc.at:1524: testing Calculator D %locations parse.lac=full parse.error=custom ... 
-./calc.at:1492: cat stderr -./calc.at:1524: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -input: - | - | +1 -./calc.at:1492: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -582. calc.at:1523: ./calc.at:1492: cat stderr - skipped (calc.at:1523) -./calc.at:1492: $PREPARSER ./calc /dev/null -stderr: - -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -584. calc.at:1525: testing Calculator D %locations parse.lac=full parse.error=detailed parse.trace ... +./calc.at:1486: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1525: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: cat stderr -583. calc.at:1524: skipped (calc.at:1524) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1492: $PREPARSER ./calc input - -stderr: +602. calc.at:1560: testing Calculator Java parse.trace parse.error=custom %locations parse.lac=full ... 
Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) +Next token is token '+' () +Error: discarding token '+' () Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) +Next token is token number (1) +Error: discarding token number (1) Reading a token -Next token is token ')' (1.28: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1560: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +stderr: + | 1 2 stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Reading a token -Next token is token '+' (1.20: ) +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1492: $PREPARSER ./calc input +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+input: +stderr: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '*' (1.39: ) +Next token is token '+' () Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Reading a token -Next token is token '=' (1.44: ) +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: cat stderr -585. calc.at:1530: testing Calculator D api.token.constructor %locations parse.error=custom api.value.type=union ... -./calc.at:1530: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -input: - | (!!) + (1 2) = 1 -./calc.at:1492: $PREPARSER ./calc input +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1482: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' 
(1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -584. calc.at:1525: ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - skipped (calc.at:1525) +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) stderr: Starting parse Entering state 0 @@ -221250,232 +216934,70 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' 
(1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -Next token is token ')' (1.12: ) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: cat stderr -586. 
calc.at:1531: testing Calculator D api.token.constructor %locations parse.error=detailed ... -./calc.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -input: - | (- *) + (1 2) = 1 -./calc.at:1492: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) Reading a token -Next token is token ')' (1.5: ) +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack 
0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -221488,374 +217010,93 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +599. calc.at:1555: ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" 
(1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1492: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -585. calc.at:1530: Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 +Next token is token '+' () +Error: discarding token '+' () Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token "number" (1) +Error: discarding token "number" (1) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token "end of input" () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - skipped (calc.at:1530) - -./calc.at:1492: "$PERL" -pi -e 'use strict; +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -221865,217 +217106,141 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -587. 
calc.at:1532: testing Calculator D api.push-pull=both ... -./calc.at:1532: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1492: cat stderr -586. calc.at:1531: skipped (calc.at:1531) -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1492: $PREPARSER ./calc input - stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '+' (1.11: ) +Next token is token '+' () Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Next token is token '+' (1.11: ) +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) +Next token is token '!' () +Shifting token '!' () Entering state 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) +Next token is token '+' () +Shifting token '+' () Entering state 14 Reducing stack 0 by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: + $1 = token '!' 
() + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) + skipped (calc.at:1555) Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '+' (1.11: ) +Next token is token '+' () Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Next token is token '+' (1.11: ) +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) +Next token is token '!' () +Shifting token '!' () Entering state 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) +Next token is token '+' () +Shifting token '+' () Entering state 14 Reducing stack 0 by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) + $1 = token '!' 
() + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) input: | 1 + 2 * 3 + !- ++ -./calc.at:1492: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -222088,61 +217253,11 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1492: "$PERL" -pi -e 'use strict; +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -222152,211 +217267,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1492: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1492: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -587. calc.at:1532: ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - skipped (calc.at:1532) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - -588. calc.at:1533: testing Calculator D parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full ... 
-./calc.at:1492: "$PERL" -pi -e 'use strict; +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -222366,170 +217277,170 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1533: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1492: cat stderr input: - | (1 + #) = 1111 -./calc.at:1492: $PREPARSER ./calc input stderr: +./calc.at:1485: cat stderr + | 1 + 2 * 3 + !- ++ + +./calc.at:1489: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 
Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token error () +Error: discarding token error () Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) 
- $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token "end of input" () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -589. calc.at:1544: testing Calculator Java ... -./calc.at:1544: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1479: cat stderr +./calc.at:1491: cat stderr +601. calc.at:1557: input: +605. torture.at:216: testing Big horizontal ... + | (1 + 1) / (1 - 1) +./calc.at:1485: $PREPARSER ./calc input + skipped (calc.at:1557) +./torture.at:230: "$PERL" -w ./gengram.pl 1000 || exit 77 ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -222540,354 +217451,335 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1492: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1492: $PREPARSER ./calc input +604. torture.at:132: testing Big triangle ... stderr: +./torture.at:138: "$PERL" -w ./gengram.pl 200 || exit 77 +input: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is 
token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1479: $PREPARSER ./calc input +stderr: +603. calc.at:1561: testing Calculator Java parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full ... +./calc.at:1561: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y + | (1 + 1) / (1 - 1) +./calc.at:1491: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 
1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -590. calc.at:1545: testing Calculator Java parse.error=custom ... -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1545: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) ./calc.at:1492: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1492: $PREPARSER ./calc input -588. 
calc.at:1533: stderr: +stderr: + Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - skipped (calc.at:1533) -stderr: +Next token is token '!' () +Shifting token '!' 
() +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token ')' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' 
() + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 106): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: cat stderr -589. calc.at:1544: skipped (calc.at:1544) -input: - | (1 + 1) / (1 - 1) -./calc.at:1492: $PREPARSER ./calc input - +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -223004,8 +217896,152 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1479: $PREPARSER ./calc input +stderr: +input: stderr: +stderr: +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 106): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () Starting parse Entering state 0 Reading a token @@ -223121,7 +218157,12 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: "$PERL" -pi -e 'use strict; +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +600. calc.at:1556: | 1//2 +./calc.at:1492: $PREPARSER ./calc input +stderr: + skipped (calc.at:1556) +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -223131,74 +218172,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1492: cat stderr -567. 
calc.at:1492: ok -590. calc.at:1545: skipped (calc.at:1545) - - -591. calc.at:1546: testing Calculator Java parse.error=detailed ... -./calc.at:1546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -592. calc.at:1547: testing Calculator Java parse.error=verbose ... -./calc.at:1547: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -593. calc.at:1548: testing Calculator Java %locations parse.error=custom ... -./calc.at:1548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -591. calc.at:1546: skipped (calc.at:1546) - -594. calc.at:1549: testing Calculator Java %locations parse.error=detailed ... -./calc.at:1549: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -592. calc.at:1547: skipped (calc.at:1547) -593. calc.at:1548: skipped (calc.at:1548) - - -594. calc.at:1549: 595. calc.at:1550: testing Calculator Java %locations parse.error=verbose ... -./calc.at:1550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y - skipped (calc.at:1549) - -596. calc.at:1551: testing Calculator Java parse.trace parse.error=verbose ... -./calc.at:1551: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -597. calc.at:1552: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is} ... -./calc.at:1552: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -595. calc.at:1550: skipped (calc.at:1550) - -596. calc.at:1551: skipped (calc.at:1551) -598. calc.at:1554: testing Calculator Java api.push-pull=both ... -./calc.at:1554: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -597. calc.at:1552: - skipped (calc.at:1552) - -598. calc.at:1554: skipped (calc.at:1554) -601. calc.at:1557: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is} api.push-pull=both ... -./calc.at:1557: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -599. calc.at:1555: testing Calculator Java api.push-pull=both parse.error=detailed %locations ... -./calc.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y - -600. calc.at:1556: testing Calculator Java parse.trace parse.error=custom %locations %lex-param {InputStream is} api.push-pull=both ... -./calc.at:1556: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -601. calc.at:1557: 602. calc.at:1560: testing Calculator Java parse.trace parse.error=custom %locations parse.lac=full ... -./calc.at:1560: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y - skipped (calc.at:1557) - -600. 
calc.at:1556: skipped (calc.at:1556) -599. calc.at:1555: skipped (calc.at:1555) - - -602. calc.at:1560: skipped (calc.at:1560) - -603. calc.at:1561: testing Calculator Java parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full ... -./calc.at:1561: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -604. torture.at:132: testing Big triangle ... -./torture.at:138: "$PERL" -w ./gengram.pl 200 || exit 77 -605. torture.at:216: testing Big horizontal ... -./torture.at:230: "$PERL" -w ./gengram.pl 1000 || exit 77 +./calc.at:1489: cat stderr +stdout: +607. torture.at:271: testing State number type: 129 states ... +./torture.at:271: ruby $abs_top_srcdir/tests/linear 129 >input.y || exit 77 +stderr: 606. torture.at:270: testing State number type: 128 states ... ./torture.at:270: ruby $abs_top_srcdir/tests/linear 128 >input.y || exit 77 ---- /dev/null 2024-04-01 11:23:58.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/stderr 2024-04-02 12:25:51.294445160 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/test-source: line 14: ruby: command not found -606. torture.at:270: skipped (torture.at:270) -603. calc.at:1561: stdout: - skipped (calc.at:1561) %code top { /* -*- c -*- */ /* Adjust to the compiler. We used to do it here, but each time we add a new line, @@ -224295,7 +219275,17 @@ "857" "858" "859" "860" "861" "862" "863" "864" "865" "866" "867" "868" "869" "870" "871" "872" "873" "874" "875" "876" "877" "878" "879" "880" "881" "882" "883" "884" "885" "886" "887" "888" "889" "890" "891" "892" - "893" "894" "895" "896" "897" "898" "899" "900" "901" "902" "903" "904" + "893" "894" "895" "896" "897" "89./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +8" "899" "900" "901" "902" "903" "904" "905" "906" "907" "908" "909" "910" "911" "912" "913" "914" "915" "916" "917" "918" "919" "920" "921" "922" "923" "924" "925" "926" "927" "928" "929" "930" "931" "932" "933" "934" "935" "936" "937" "938" "939" "940" @@ -224323,3863 +219313,970 @@ static int counter = 1; if (counter <= MAX) return counter++; - assert (counter++ == MAX + 1); - return 0; -} -#include /* getenv. */ -#include /* strcmp. */ -int -main (int argc, char const* argv[]) -{ - (void) argc; - (void) argv; - return yyparse (); -} - -./torture.at:236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - -608. torture.at:272: testing State number type: 256 states ... -./torture.at:272: ruby $abs_top_srcdir/tests/linear 256 >input.y || exit 77 ---- /dev/null 2024-04-01 11:23:58.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/stderr 2024-04-02 12:25:51.414442111 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/test-source: line 14: ruby: command not found -607. torture.at:271: testing State number type: 129 states ... -./torture.at:271: ruby $abs_top_srcdir/tests/linear 129 >input.y || exit 77 -608. 
torture.at:272: --- /dev/null 2024-04-01 11:23:58.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/stderr 2024-04-02 12:25:51.418442009 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/test-source: line 14: ruby: command not found -607. torture.at:271: skipped (torture.at:272) - skipped (torture.at:271) - - -609. torture.at:273: testing State number type: 257 states ... -./torture.at:273: ruby $abs_top_srcdir/tests/linear 257 >input.y || exit 77 ---- /dev/null 2024-04-01 11:23:58.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/stderr 2024-04-02 12:25:51.550438655 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/test-source: line 14: ruby: command not found -610. torture.at:274: testing State number type: 32768 states ... -./torture.at:274: ruby $abs_top_srcdir/tests/linear 32768 >input.y || exit 77 ---- /dev/null 2024-04-01 11:23:58.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/stderr 2024-04-02 12:25:51.554438553 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/test-source: line 14: ruby: command not found -610. torture.at:274: 609. torture.at:273: skipped (torture.at:274) - skipped (torture.at:273) - -stdout: -%code top { /* -*- c -*- */ -/* Adjust to the compiler. - We used to do it here, but each time we add a new line, - we have to adjust all the line numbers in error messages. - It's simpler to use a constant include to a varying file. */ -#include -} - -%define parse.error verbose -%debug -%{ -#include -#include -#include -#define MAX 200 -static int yylex (void); -#include - -/* !POSIX */ static void yyerror (const char *msg); -%} -%union -{ - int val; -}; - -%token END "end" -%type exp input -%token t1 1 "1" -%token t2 2 "2" -%token t3 3 "3" -%token t4 4 "4" -%token t5 5 "5" -%token t6 6 "6" -%token t7 7 "7" -%token t8 8 "8" -%token t9 9 "9" -%token t10 10 "10" -%token t11 11 "11" -%token t12 12 "12" -%token t13 13 "13" -%token t14 14 "14" -%token t15 15 "15" -%token t16 16 "16" -%token t17 17 "17" -%token t18 18 "18" -%token t19 19 "19" -%token t20 20 "20" -%token t21 21 "21" -%token t22 22 "22" -%token t23 23 "23" -%token t24 24 "24" -%token t25 25 "25" -%token t26 26 "26" -%token t27 27 "27" -%token t28 28 "28" -%token t29 29 "29" -%token t30 30 "30" -%token t31 31 "31" -%token t32 32 "32" -%token t33 33 "33" -%token t34 34 "34" -%token t35 35 "35" -%token t36 36 "36" -%token t37 37 "37" -%token t38 38 "38" -%token t39 39 "39" -%token t40 40 "40" -%token t41 41 "41" -%token t42 42 "42" -%token t43 43 "43" -%token t44 44 "44" -%token t45 45 "45" -%token t46 46 "46" -%token t47 47 "47" -%token t48 48 "48" -%token t49 49 "49" -%token t50 50 "50" -%token t51 51 "51" -%token t52 52 "52" -%token t53 53 "53" -%token t54 54 "54" -%token t55 55 "55" -%token t56 56 "56" -%token t57 57 "57" -%token t58 58 "58" -%token t59 59 "59" -%token t60 60 "60" -%token t61 61 "61" -%token t62 62 "62" -%token t63 63 "63" -%token t64 64 "64" -%token t65 65 "65" -%token t66 66 "66" -%token t67 67 "67" -%token t68 68 "68" -%token t69 69 "69" -%token t70 70 "70" -%token t71 71 "71" -%token t72 72 "72" -%token t73 73 "73" -%token t74 74 "74" -%token t75 75 "75" -%token t76 76 "76" -%token t77 77 "77" -%token t78 78 "78" -%token t79 79 "79" -%token t80 80 "80" -%token t81 81 "81" -%token t82 82 "82" -%token t83 83 
"83" -%token t84 84 "84" -%token t85 85 "85" -%token t86 86 "86" -%token t87 87 "87" -%token t88 88 "88" -%token t89 89 "89" -%token t90 90 "90" -%token t91 91 "91" -%token t92 92 "92" -%token t93 93 "93" -%token t94 94 "94" -%token t95 95 "95" -%token t96 96 "96" -%token t97 97 "97" -%token t98 98 "98" -%token t99 99 "99" -%token t100 100 "100" -%token t101 101 "101" -%token t102 102 "102" -%token t103 103 "103" -%token t104 104 "104" -%token t105 105 "105" -%token t106 106 "106" -%token t107 107 "107" -%token t108 108 "108" -%token t109 109 "109" -%token t110 110 "110" -%token t111 111 "111" -%token t112 112 "112" -%token t113 113 "113" -%token t114 114 "114" -%token t115 115 "115" -%token t116 116 "116" -%token t117 117 "117" -%token t118 118 "118" -%token t119 119 "119" -%token t120 120 "120" -%token t121 121 "121" -%token t122 122 "122" -%token t123 123 "123" -%token t124 124 "124" -%token t125 125 "125" -%token t126 126 "126" -%token t127 127 "127" -%token t128 128 "128" -%token t129 129 "129" -%token t130 130 "130" -%token t131 131 "131" -%token t132 132 "132" -%token t133 133 "133" -%token t134 134 "134" -%token t135 135 "135" -%token t136 136 "136" -%token t137 137 "137" -%token t138 138 "138" -%token t139 139 "139" -%token t140 140 "140" -%token t141 141 "141" -%token t142 142 "142" -%token t143 143 "143" -%token t144 144 "144" -%token t145 145 "145" -%token t146 146 "146" -%token t147 147 "147" -%token t148 148 "148" -%token t149 149 "149" -%token t150 150 "150" -%token t151 151 "151" -%token t152 152 "152" -%token t153 153 "153" -%token t154 154 "154" -%token t155 155 "155" -%token t156 156 "156" -%token t157 157 "157" -%token t158 158 "158" -%token t159 159 "159" -%token t160 160 "160" -%token t161 161 "161" -%token t162 162 "162" -%token t163 163 "163" -%token t164 164 "164" -%token t165 165 "165" -%token t166 166 "166" -%token t167 167 "167" -%token t168 168 "168" -%token t169 169 "169" -%token t170 170 "170" -%token t171 171 "171" -%token t172 172 "172" -%token t173 173 "173" -%token t174 174 "174" -%token t175 175 "175" -%token t176 176 "176" -%token t177 177 "177" -%token t178 178 "178" -%token t179 179 "179" -%token t180 180 "180" -%token t181 181 "181" -%token t182 182 "182" -%token t183 183 "183" -%token t184 184 "184" -%token t185 185 "185" -%token t186 186 "186" -%token t187 187 "187" -%token t188 188 "188" -%token t189 189 "189" -%token t190 190 "190" -%token t191 191 "191" -%token t192 192 "192" -%token t193 193 "193" -%token t194 194 "194" -%token t195 195 "195" -%token t196 196 "196" -%token t197 197 "197" -%token t198 198 "198" -%token t199 199 "199" -%token t200 200 "200" -%% -input: - exp { assert ($1 == 0); $$ = $1; } -| input exp { assert ($2 == $1 + 1); $$ = $2; } -; - -exp: - END - { $$ = 0; } -| "1" END - { $$ = 1; } -| "1" "2" END - { $$ = 2; } -| "1" "2" "3" END - { $$ = 3; } -| "1" "2" "3" "4" END - { $$ = 4; } -| "1" "2" "3" "4" "5" END - { $$ = 5; } -| "1" "2" "3" "4" "5" "6" END - { $$ = 6; } -| "1" "2" "3" "4" "5" "6" "7" END - { $$ = 7; } -| "1" "2" "3" "4" "5" "6" "7" "8" END - { $$ = 8; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" END - { $$ = 9; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" END - { $$ = 10; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" END - { $$ = 11; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" END - { $$ = 12; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" END - { $$ = 13; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" END - { $$ = 14; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" 
"11" "12" "13" "14" "15" END - { $$ = 15; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - END - { $$ = 16; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" END - { $$ = 17; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" END - { $$ = 18; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" END - { $$ = 19; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" END - { $$ = 20; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" END - { $$ = 21; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" END - { $$ = 22; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" END - { $$ = 23; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" END - { $$ = 24; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" END - { $$ = 25; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" END - { $$ = 26; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" END - { $$ = 27; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" END - { $$ = 28; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" END - { $$ = 29; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - END - { $$ = 30; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" END - { $$ = 31; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" END - { $$ = 32; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" END - { $$ = 33; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" END - { $$ = 34; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" END - { $$ = 35; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" END - { $$ = 36; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" END - { $$ = 37; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" END - { $$ = 38; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" 
"10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" END - { $$ = 39; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" END - { $$ = 40; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" END - { $$ = 41; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" END - { $$ = 42; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" END - { $$ = 43; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - END - { $$ = 44; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" END - { $$ = 45; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" END - { $$ = 46; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" END - { $$ = 47; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" END - { $$ = 48; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" END - { $$ = 49; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" END - { $$ = 50; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" END - { $$ = 51; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" END - { $$ = 52; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" 
"32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" END - { $$ = 53; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" END - { $$ = 54; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" END - { $$ = 55; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" END - { $$ = 56; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" END - { $$ = 57; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - END - { $$ = 58; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" END - { $$ = 59; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" END - { $$ = 60; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" END - { $$ = 61; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" END - { $$ = 62; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" END - { $$ = 63; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" 
"60" "61" "62" "63" "64" END - { $$ = 64; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" END - { $$ = 65; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" END - { $$ = 66; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" END - { $$ = 67; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" END - { $$ = 68; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" END - { $$ = 69; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" END - { $$ = 70; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" END - { $$ = 71; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - END - { $$ = 72; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" END - { $$ = 73; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" 
"44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" END - { $$ = 74; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" END - { $$ = 75; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" END - { $$ = 76; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" END - { $$ = 77; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" END - { $$ = 78; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" END - { $$ = 79; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" END - { $$ = 80; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" END - { $$ = 81; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" END - { $$ = 82; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" 
"12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" END - { $$ = 83; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" END - { $$ = 84; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" END - { $$ = 85; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - END - { $$ = 86; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" END - { $$ = 87; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" END - { $$ = 88; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" END - { $$ = 89; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" 
"85" "86" - "87" "88" "89" "90" END - { $$ = 90; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" END - { $$ = 91; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" END - { $$ = 92; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" END - { $$ = 93; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" END - { $$ = 94; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" END - { $$ = 95; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" END - { $$ = 96; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" END - { $$ = 97; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" 
"10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" END - { $$ = 98; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" END - { $$ = 99; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - END - { $$ = 100; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" END - { $$ = 101; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" END - { $$ = 102; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" END - { $$ = 103; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" 
"81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" END - { $$ = 104; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" END - { $$ = 105; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" END - { $$ = 106; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" END - { $$ = 107; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" END - { $$ = 108; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" END - { $$ = 109; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" 
"108" "109" "110" END - { $$ = 110; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" END - { $$ = 111; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - END - { $$ = 112; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" END - { $$ = 113; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" END - { $$ = 114; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" END - { $$ = 115; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" 
"94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" END - { $$ = 116; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" END - { $$ = 117; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" END - { $$ = 118; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" END - { $$ = 119; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" END - { $$ = 120; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" END - { $$ = 121; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" 
"36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" END - { $$ = 122; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" END - { $$ = 123; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - END - { $$ = 124; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" END - { $$ = 125; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" END - { $$ = 126; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" 
"70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" END - { $$ = 127; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" END - { $$ = 128; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" END - { $$ = 129; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" END - { $$ = 130; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" END - { $$ = 131; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - 
"73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" END - { $$ = 132; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" END - { $$ = 133; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" END - { $$ = 134; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" END - { $$ = 135; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - END - { $$ = 136; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" 
"47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" END - { $$ = 137; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" END - { $$ = 138; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" END - { $$ = 139; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" END - { $$ = 140; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" 
"135" "136" - "137" "138" "139" "140" "141" END - { $$ = 141; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" END - { $$ = 142; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" END - { $$ = 143; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" END - { $$ = 144; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" END - { $$ = 145; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" 
"62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" END - { $$ = 146; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" END - { $$ = 147; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - END - { $$ = 148; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" END - { $$ = 149; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" 
"112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" END - { $$ = 150; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" END - { $$ = 151; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" END - { $$ = 152; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" END - { $$ = 153; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" 
"139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" END - { $$ = 154; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" END - { $$ = 155; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" END - { $$ = 156; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" END - { $$ = 157; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" 
"151" "152" "153" "154" "155" "156" "157" "158" END - { $$ = 158; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" END - { $$ = 159; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - END - { $$ = 160; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" END - { $$ = 161; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" 
"147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" END - { $$ = 162; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" END - { $$ = 163; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" END - { $$ = 164; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" END - { $$ = 165; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" 
"126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" END - { $$ = 166; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" END - { $$ = 167; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" END - { $$ = 168; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" END - { $$ = 169; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" 
"88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" END - { $$ = 170; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" END - { $$ = 171; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - END - { $$ = 172; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" END - { $$ = 173; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" 
"25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" END - { $$ = 174; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" END - { $$ = 175; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" END - { $$ = 176; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" 
"131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" END - { $$ = 177; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" END - { $$ = 178; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" END - { $$ = 179; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" END - { $$ = 180; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" 
"41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" END - { $$ = 181; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" END - { $$ = 182; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" END - { $$ = 183; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" 
"124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - END - { $$ = 184; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" END - { $$ = 185; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" END - { $$ = 186; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" 
END - { $$ = 187; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" END - { $$ = 188; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" END - { $$ = 189; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" END - { $$ = 190; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" 
"61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" END - { $$ = 191; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" END - { $$ = 192; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" END - { $$ = 193; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" 
"111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" END - { $$ = 194; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" END - { $$ = 195; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - END - { $$ = 196; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" 
"145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - "197" END - { $$ = 197; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - "197" "198" END - { $$ = 198; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - "197" "198" "199" END - { $$ = 199; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" 
"170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - "197" "198" "199" "200" END - { $$ = 200; } -; -%% - - - - -/* A C error reporting function. */ -/* !POSIX */ static -void yyerror (const char *msg) -{ - fprintf (stderr, "%s\n", msg); -} -static int -yylex (void) -{ - static int inner = 1; - static int outer = 0; - if (outer > MAX) - return 0; - else if (inner > outer) - { - inner = 1; - ++outer; - return END; - } - return inner++; -} -#include /* getenv. */ -#include /* strcmp. */ -int -main (int argc, char const* argv[]) -{ - (void) argc; - (void) argv; - return yyparse (); -} - -./torture.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y -611. torture.at:275: testing State number type: 65536 states ... -./torture.at:275: ruby $abs_top_srcdir/tests/linear 65536 >input.y || exit 77 ---- /dev/null 2024-04-01 11:23:58.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/stderr 2024-04-02 12:25:51.682435301 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/test-source: line 14: ruby: command not found -612. torture.at:276: testing State number type: 65537 states ... -./torture.at:276: ruby $abs_top_srcdir/tests/linear 65537 >input.y || exit 77 ---- /dev/null 2024-04-01 11:23:58.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/stderr 2024-04-02 12:25:51.714434488 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/test-source: line 14: ruby: command not found -611. torture.at:275: 612. torture.at:276: skipped (torture.at:275) - skipped (torture.at:276) - - -stderr: -stdout: -./calc.at:1492: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1492: $PREPARSER ./calc input -614. torture.at:485: testing Exploding the Stack Size with Alloca ... -./torture.at:494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -613. torture.at:385: testing Many lookahead tokens ... 
-./torture.at:387: "$PERL" -w ./gengram.pl 1000 || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: 
-3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = 
nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 -Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Reading a token -Next token is token number (7.4: 1) -Shifting 
token number (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Next token is 
token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Reducing stack 0 by rule 4 
(line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp 
(13.4: 2) -Entering state 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 18 -Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input (14.1: ) -Entering state 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) 
-Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 + assert (counter++ == MAX + 1); + return 0; +} +#include /* getenv. */ +#include /* strcmp. 
*/ +int +main (int argc, char const* argv[]) +{ + (void) argc; + (void) argv; + return yyparse (); +} +Starting parse +Entering state 0 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +608. torture.at:272: testing State number type: 256 states ... +./torture.at:272: ruby $abs_top_srcdir/tests/linear 256 >input.y || exit 77 +input: +./calc.at:1486: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./torture.at:236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +--- /dev/null 2025-04-29 17:46:35.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/stderr 2025-05-05 18:55:03.404300099 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/test-source: line 14: ruby: command not found +./calc.at:1482: $PREPARSER ./calc input +--- /dev/null 2025-04-29 17:46:35.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/stderr 2025-05-05 18:55:03.404300099 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/test-source: line 14: ruby: command not found +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +--- /dev/null 2025-04-29 17:46:35.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/stderr 2025-05-05 18:55:03.408300099 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/test-source: line 14: ruby: command not found +607. torture.at:271: +./calc.at:1491: cat stderr +input: + skipped (torture.at:271) + | (#) + (#) = 2222 +606. 
torture.at:270: ./calc.at:1489: $PREPARSER ./calc input +stderr: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: cat stderr +stderr: +608. torture.at:272: input: + skipped (torture.at:270) + skipped (torture.at:272) +Starting parse +Entering state 0 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" 
(1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: cat stderr +Starting parse +Entering state 0 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) + | (#) + (#) = 2222 +559. 
calc.at:1485: ok +stderr: +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: 
-4) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 -Next token is token '\n' (9.15-10.0: ) +Reading a token +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1486: $PREPARSER ./calc input +565. 
calc.at:1491: + ok +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 28 Reading a token -Next token is token ')' (10.11: ) +Next token is token ')' () Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = 
nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () + +602. calc.at:1560: ./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (#) + (#) = 2222 +./calc.at:1479: $PREPARSER ./calc input + skipped (calc.at:1560) +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token -Next token is token '\n' (10.16-11.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) + $1 = nterm exp (2222) + $2 = token 
'\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: cat stderr +stderr: + +syntax error: invalid character: '#' +syntax error: invalid character: '#' +input: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + + | error +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is 
token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token -Next token is token '\n' (12.12-13.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: $PREPARSER ./calc input + +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token -Next token is token '\n' (13.13-14.0: ) 
+Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input (14.1: ) +Shifting token end of input () Entering state 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1489: cat stderr +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +610. torture.at:274: testing State number type: 32768 states ... +./torture.at:274: ruby $abs_top_srcdir/tests/linear 32768 >input.y || exit 77 +609. torture.at:273: testing State number type: 257 states ... +./torture.at:273: ruby $abs_top_srcdir/tests/linear 257 >input.y || exit 77 +./calc.at:1479: cat stderr +--- /dev/null 2025-04-29 17:46:35.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/stderr 2025-05-05 18:55:03.456300099 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/test-source: line 14: ruby: command not found +603. calc.at:1561: --- /dev/null 2025-04-29 17:46:35.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/stderr 2025-05-05 18:55:03.460300099 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/test-source: line 14: ruby: command not found +557. calc.at:1482: ok +610. torture.at:274: ./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + skipped (calc.at:1561) +609. 
torture.at:273: skipped (torture.at:274) +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + skipped (torture.at:273) + input: - | 1 2 -./calc.at:1492: $PREPARSER ./calc input +input: + | (1 + #) = 1111 +./calc.at:1479: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1492: cat stderr + +613. torture.at:385: testing Many lookahead tokens ... +612. torture.at:276: testing State number type: 65537 states ... +./torture.at:276: ruby $abs_top_srcdir/tests/linear 65537 >input.y || exit 77 + +611. torture.at:275: testing State number type: 65536 states ... +./torture.at:275: ruby $abs_top_srcdir/tests/linear 65536 >input.y || exit 77 +./torture.at:387: "$PERL" -w ./gengram.pl 1000 || exit 77 +./calc.at:1486: cat stderr +616. existing.at:74: testing GNU AWK 3.1.0 Grammar: LALR(1) ... +stderr: +614. torture.at:485: testing Exploding the Stack Size with Alloca ... +617. existing.at:74: testing GNU AWK 3.1.0 Grammar: IELR(1) ... +--- /dev/null 2025-04-29 17:46:35.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/stderr 2025-05-05 18:55:03.528300099 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/test-source: line 14: ruby: command not found stderr: +syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token number (1.3: 2) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp 
(1111) Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token number (1.3: 2) +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./torture.at:494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y + +615. torture.at:531: testing Exploding the Stack Size with Malloc ... +./torture.at:535: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +--- /dev/null 2025-04-29 17:46:35.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/stderr 2025-05-05 18:55:03.528300099 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/test-source: line 14: ruby: command not found stdout: %define parse.error verbose %debug @@ -231336,6 +223433,2398 @@ (void) argv; return yyparse (); } +./torture.at:535: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +612. torture.at:276: 611. torture.at:275: ./torture.at:393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y + skipped (torture.at:276) + skipped (torture.at:275) +stderr: +input: + | 1 = 2 = 3 +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + + +syntax error: invalid character: '#' +618. existing.at:74: testing GNU AWK 3.1.0 Grammar: Canonical LR(1) ... +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +input: +./torture.at:494: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + | (1 + #) = 1111 +./calc.at:1486: $PREPARSER ./calc input +620. existing.at:808: testing GNU Cim Grammar: IELR(1) ... +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +stderr: +619. existing.at:808: testing GNU Cim Grammar: LALR(1) ... 
+./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +622. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: LALR(1) ... +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +stdout: +%code top { /* -*- c -*- */ +/* Adjust to the compiler. + We used to do it here, but each time we add a new line, + we have to adjust all the line numbers in error messages. + It's simpler to use a constant include to a varying file. 
*/ +#include +} + +%define parse.error verbose +%debug +%{ +#include +#include +#include +#define MAX 200 +static int yylex (void); +#include + +/* !POSIX */ static void yyerror (const char *msg); +%} +%union +{ + int val; +}; + +%token END "end" +%type <val> exp input +%token t1 1 "1" +%token t2 2 "2" +%token t3 3 "3" +%token t4 4 "4" +%token t5 5 "5" +%token t6 6 "6" +%token t7 7 "7" +%token t8 8 "8" +%token t9 9 "9" +%token t10 10 "10" +%token t11 11 "11" +%token t12 12 "12" +%token t13 13 "13" +%token t14 14 "14" +%token t15 15 "15" +%token t16 16 "16" +%token t17 17 "17" +%token t18 18 "18" +%token t19 19 "19" +%token t20 20 "20" +%token t21 21 "21" +%token t22 22 "22" +%token t23 23 "23" +%token t24 24 "24" +%token t25 25 "25" +%token t26 26 "26" +%token t27 27 "27" +%token t28 28 "28" +%token t29 29 "29" +%token t30 30 "30" +%token t31 31 "31" +%token t32 32 "32" +%token t33 33 "33" +%token t34 34 "34" +%token t35 35 "35" +%token t36 36 "36" +%token t37 37 "37" +%token t38 38 "38" +%token t39 39 "39" +%token t40 40 "40" +%token t41 41 "41" +%token t42 42 "42" +%token t43 43 "43" +%token t44 44 "44" +%token t45 45 "45" +%token t46 46 "46" +%token t47 47 "47" +%token t48 48 "48" +%token t49 49 "49" +%token t50 50 "50" +%token t51 51 "51" +%token t52 52 "52" +%token t53 53 "53" +%token t54 54 "54" +%token t55 55 "55" +%token t56 56 "56" +%token t57 57 "57" +%token t58 58 "58" +%token t59 59 "59" +%token t60 60 "60" +%token t61 61 "61" +%token t62 62 "62" +%token t63 63 "63" +%token t64 64 "64" +%token t65 65 "65" +%token t66 66 "66" +%token t67 67 "67" +%token t68 68 "68" +%token t69 69 "69" +%token t70 70 "70" +%token t71 71 "71" +%token t72 72 "72" +%token t73 73 "73" +%token t74 74 "74" +%token t75 75 "75" +%token t76 76 "76" +%token t77 77 "77" +%token t78 78 "78" +%token t79 79 "79" +%token t80 80 "80" +%token t81 81 "81" +%token t82 82 "82" +%token t83 83 "83" +%token t84 84 "84" +%token t85 85 "85" +%token t86 86 "86" +%token t87 87 "87" +%token t88 88 "88" +%token t89 89 "89" +%token t90 90 "90" +%token t91 91 "91" +%token t92 92 "92" +%token t93 93 "93" +%token t94 94 "94" +%token t95 95 "95" +%token t96 96 "96" +%token t97 97 "97" +%token t98 98 "98" +%token t99 99 "99" +%token t100 100 "100" +%token t101 101 "101" +%token t102 102 "102" +%token t103 103 "103" +%token t104 104 "104" +%token t105 105 "105" +%token t106 106 "106" +%token t107 107 "107" +%token t108 108 "108" +%token t109 109 "109" +%token t110 110 "110" +%token t111 111 "111" +%token t112 112 "112" +%token t113 113 "113" +%token t114 114 "114" +%token t115 115 "115" +%token t116 116 "116" +%token t117 117 "117" +%token t118 118 "118" +%token t119 119 "119" +%token t120 120 "120" +%token t121 121 "121" +%token t122 122 "122" +%token t123 123 "123" +%token t124 124 "124" +%token t125 125 "125" +%token t126 126 "126" +%token t127 127 "127" +%token t128 128 "128" +%token t129 129 "129" +%token t130 130 "130" +%token t131 131 "131" +%token t132 132 "132" +%token t133 133 "133" +%token t134 134 "134" +%token t135 135 "135" +%token t136 136 "136" +%token t137 137 "137" +%token t138 138 "138" +%token t139 139 "139" +%token t140 140 "140" +%token t141 141 "141" +%token t142 142 "142" +%token t143 143 "143" +%token t144 144 "144" +%token t145 145 "145" +%token t146 146 "146" +%token t147 147 "147" +%token t148 148 "148" +%token t149 149 "149" +%token t150 150 "150" +%token t151 151 "151" +%token t152 152 "152" +%token t153 153 "153" +%token t154 154 "154" +%token t155 155 "155" +%token t156 156 "156" +%token t157 157 "157" 
+%token t158 158 "158" +%token t159 159 "159" +%token t160 160 "160" +%token t161 161 "161" +%token t162 162 "162" +%token t163 163 "163" +%token t164 164 "164" +%token t165 165 "165" +%token t166 166 "166" +%token t167 167 "167" +%token t168 168 "168" +%token t169 169 "169" +%token t170 170 "170" +%token t171 171 "171" +%token t172 172 "172" +%token t173 173 "173" +%token t174 174 "174" +%token t175 175 "175" +%token t176 176 "176" +%token t177 177 "177" +%token t178 178 "178" +%token t179 179 "179" +%token t180 180 "180" +%token t181 181 "181" +%token t182 182 "182" +%token t183 183 "183" +%token t184 184 "184" +%token t185 185 "185" +%token t186 186 "186" +%token t187 187 "187" +%token t188 188 "188" +%token t189 189 "189" +%token t190 190 "190" +%token t191 191 "191" +%token t192 192 "192" +%token t193 193 "193" +%token t194 194 "194" +%token t195 195 "195" +%token t196 196 "196" +%token t197 197 "197" +%token t198 198 "198" +%token t199 199 "199" +%token t200 200 "200" +%% +input: + exp { assert ($1 == 0); $$ = $1; } +| input exp { assert ($2 == $1 + 1); $$ = $2; } +; + +exp: + END + { $$ = 0; } +| "1" END + { $$ = 1; } +| "1" "2" END + { $$ = 2; } +| "1" "2" "3" END + { $$ = 3; } +| "1" "2" "3" "4" END + { $$ = 4; } +| "1" "2" "3" "4" "5" END + { $$ = 5; } +| "1" "2" "3" "4" "5" "6" END + { $$ = 6; } +| "1" "2" "3" "4" "5" "6" "7" END + { $$ = 7; } +| "1" "2" "3" "4" "5" "6" "7" "8" END + { $$ = 8; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" END + { $$ = 9; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" END + { $$ = 10; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" END + { $$ = 11; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" END + { $$ = 12; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" END + { $$ = 13; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" END + { $$ = 14; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" END + { $$ = 15; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + END + { $$ = 16; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" END + { $$ = 17; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" END + { $$ = 18; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" END + { $$ = 19; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" END + { $$ = 20; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" END + { $$ = 21; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" END + { $$ = 22; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" END + { $$ = 23; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" END + { $$ = 24; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" END + { $$ = 25; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" END + { $$ = 26; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" END + { $$ = 27; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" END + { 
$$ = 28; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" END + { $$ = 29; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + END + { $$ = 30; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" END + { $$ = 31; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" END + { $$ = 32; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" END + { $$ = 33; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" END + { $$ = 34; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" END + { $$ = 35; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" END + { $$ = 36; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" END + { $$ = 37; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" END + { $$ = 38; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" END + { $$ = 39; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" END + { $$ = 40; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" END + { $$ = 41; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" END + { $$ = 42; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" END + { $$ = 43; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + END + { $$ = 44; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" END + { $$ = 45; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" 
"23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" END + { $$ = 46; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" END + { $$ = 47; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" END + { $$ = 48; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" END + { $$ = 49; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" END + { $$ = 50; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" END + { $$ = 51; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" END + { $$ = 52; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" END + { $$ = 53; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" END + { $$ = 54; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" END + { $$ = 55; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" END + { $$ = 56; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" END + { $$ = 57; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + END + { $$ = 58; } +| "1" "2" "3" "4" "5" "6" "7" 
"8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" END + { $$ = 59; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" END + { $$ = 60; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" END + { $$ = 61; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" END + { $$ = 62; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" END + { $$ = 63; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" END + { $$ = 64; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" END + { $$ = 65; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" END + { $$ = 66; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" END + { $$ = 67; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" END + { $$ = 68; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" 
"41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" END + { $$ = 69; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" END + { $$ = 70; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" END + { $$ = 71; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + END + { $$ = 72; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" END + { $$ = 73; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" END + { $$ = 74; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" END + { $$ = 75; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" END + { $$ = 76; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" END + { $$ = 77; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" 
"54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" END + { $$ = 78; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" END + { $$ = 79; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" END + { $$ = 80; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" END + { $$ = 81; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" END + { $$ = 82; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" END + { $$ = 83; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" END + { $$ = 84; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" END + { $$ = 85; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" 
"75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + END + { $$ = 86; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" END + { $$ = 87; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" END + { $$ = 88; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" END + { $$ = 89; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" END + { $$ = 90; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" END + { $$ = 91; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" END + { $$ = 92; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" END + { $$ = 93; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + 
"31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" END + { $$ = 94; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" END + { $$ = 95; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" END + { $$ = 96; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" END + { $$ = 97; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" END + { $$ = 98; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" END + { $$ = 99; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + END + { $$ = 100; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" 
"27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" END + { $$ = 101; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" END + { $$ = 102; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" END + { $$ = 103; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" END + { $$ = 104; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" END + { $$ = 105; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" END + { $$ = 106; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" 
"76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" END + { $$ = 107; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" END + { $$ = 108; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" END + { $$ = 109; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" END + { $$ = 110; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" END + { $$ = 111; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + END + { $$ = 112; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" 
"83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" END + { $$ = 113; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" END + { $$ = 114; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" END + { $$ = 115; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" END + { $$ = 116; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" END + { $$ = 117; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" END + { $$ = 118; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" 
"46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" END + { $$ = 119; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" END + { $$ = 120; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" END + { $$ = 121; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" END + { $$ = 122; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" END + { $$ = 123; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" 
"98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + END + { $$ = 124; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" END + { $$ = 125; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" END + { $$ = 126; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" END + { $$ = 127; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" END + { $$ = 128; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" 
"116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" END + { $$ = 129; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" END + { $$ = 130; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" END + { $$ = 131; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" END + { $$ = 132; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" END + { $$ = 133; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" 
"109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" END + { $$ = 134; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" END + { $$ = 135; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + END + { $$ = 136; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" END + { $$ = 137; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" END + { $$ = 138; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" 
"72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" END + { $$ = 139; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" END + { $$ = 140; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" END + { $$ = 141; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" END + { $$ = 142; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" END + { $$ = 143; } +| "1" 
"2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" END + { $$ = 144; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" END + { $$ = 145; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" END + { $$ = 146; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" END + { $$ = 147; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" 
"66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + END + { $$ = 148; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" END + { $$ = 149; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" END + { $$ = 150; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" END + { $$ = 151; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" 
"107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" END + { $$ = 152; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" END + { $$ = 153; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" END + { $$ = 154; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" END + { $$ = 155; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" 
"126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" END + { $$ = 156; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" END + { $$ = 157; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" END + { $$ = 158; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" END + { $$ = 159; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" 
"130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + END + { $$ = 160; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" END + { $$ = 161; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" END + { $$ = 162; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" END + { $$ = 163; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" 
"117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" END + { $$ = 164; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" END + { $$ = 165; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" END + { $$ = 166; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" END + { $$ = 167; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" 
+ "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" END + { $$ = 168; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" END + { $$ = 169; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" END + { $$ = 170; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" END + { $$ = 171; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" 
"34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + END + { $$ = 172; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" END + { $$ = 173; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" END + { $$ = 174; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" 
"145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" END + { $$ = 175; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" END + { $$ = 176; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" END + { $$ = 177; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" END + { $$ = 178; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" 
"67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" END + { $$ = 179; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" END + { $$ = 180; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" END + { $$ = 181; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" 
"152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" END + { $$ = 182; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" END + { $$ = 183; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + END + { $$ = 184; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" END + { $$ = 185; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" 
"42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" END + { $$ = 186; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" END + { $$ = 187; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" END + { $$ = 188; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" 
"110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" END + { $$ = 189; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" END + { $$ = 190; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" END + { $$ = 191; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" 
"159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" END + { $$ = 192; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" END + { $$ = 193; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" END + { $$ = 194; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" END + { $$ = 195; } +| 
"1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + END + { $$ = 196; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + "197" END + { $$ = 197; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + "197" "198" END + { $$ = 198; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" 
"36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + "197" "198" "199" END + { $$ = 199; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + "197" "198" "199" "200" END + { $$ = 200; } +; +%% + + + + +/* A C error reporting function. */ +/* !POSIX */ static +void yyerror (const char *msg) +{ + fprintf (stderr, "%s\n", msg); +} +static int +yylex (void) +{ + static int inner = 1; + static int outer = 0; + if (outer > MAX) + return 0; + else if (inner > outer) + { + inner = 1; + ++outer; + return END; + } + return inner++; +} +#include /* getenv. */ +#include /* strcmp. 
*/ +int +main (int argc, char const* argv[]) +{ + (void) argc; + (void) argv; + return yyparse (); +} +./torture.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +621. existing.at:808: testing GNU Cim Grammar: Canonical LR(1) ... +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +623. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: IELR(1) ... 
+./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: cat stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +input: +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror + | (# + 1) = 1111 +./calc.at:1479: $PREPARSER ./calc input ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -231346,56 +225835,483 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +syntax error: invalid character: '#' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1492: cat stderr -./torture.at:393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: +./calc.at:1486: cat stderr +./calc.at:1489: cat stderr +syntax error: invalid character: '#' + | + | +1 +./calc.at:1492: $PREPARSER ./calc input +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +input: +stderr: + | (# + 1) = 1111 +./calc.at:1489: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] +input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] +input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] +input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] +input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] +input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] +input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] +input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] +input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] +stderr: +stdout: +./torture.at:497: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +stderr: +input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] +input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] +input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] +input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] +input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] +input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] +input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] +input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] +input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +input: + | (# + 1) = 1111 +stderr: +./calc.at:1486: $PREPARSER ./calc input +stderr: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./torture.at:497: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./existing.at:74: sed 's,.*/$,,' stderr 1>&2 +./existing.at:74: sed 's,.*/$,,' stderr 1>&2 +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stdout: +./torture.at:500: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 +./calc.at:1479: cat stderr +./torture.at:538: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 +stderr: +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +stderr: +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error input: - | 1//2 -./calc.at:1492: $PREPARSER ./calc input -./torture.at:494: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./torture.at:538: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./torture.at:500: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1479: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '+' () +Error: discarding token '+' () Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1492: "$PERL" -pi -e 'use strict; +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./torture.at:504: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 +./torture.at:541: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 +./calc.at:1492: cat stderr +./calc.at:1492: $PREPARSER ./calc /dev/null +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -231405,26 +226321,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1492: cat stderr -input: - | error -./calc.at:1492: $PREPARSER ./calc input +stderr: +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +stderr: +stderr: +./torture.at:541: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: cat stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Now at end of input. 
+1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +memory exhausted +memory exhausted +syntax error: invalid character: '#' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./torture.at:504: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -./calc.at:1492: "$PERL" -pi -e 'use strict; + | (1 + # + 1) = 1111 +./calc.at:1489: $PREPARSER ./calc input +stderr: +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -231434,71 +226356,103 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1492: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1492: $PREPARSER ./calc input +memory exhausted +memory exhausted +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +./calc.at:1486: cat stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '+' () +Error: discarding token '+' () Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token number 
(1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./torture.at:510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./torture.at:545: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -231509,53 +226463,100 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1492: cat stderr -input: - | - | +1 -./calc.at:1492: $PREPARSER ./calc input stderr: +stderr: +memory exhausted +memory exhausted +./torture.at:545: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is 
token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1492: "$PERL" -pi -e 'use strict; +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1492: cat stderr +syntax error: invalid character: '#' +stderr: +memory exhausted +memory exhausted +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -231565,24 +226566,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1492: cat stderr -./calc.at:1492: $PREPARSER ./calc /dev/null -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./torture.at:548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +input: + | (1 + # + 1) = 1111 +input: +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1479: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -./calc.at:1492: "$PERL" -pi -e 'use strict; +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -231592,10 +226584,93 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1492: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1492: $PREPARSER ./calc input +input: +./calc.at:1489: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -231626,11 +226701,11 @@ Shifting token '(' (1.6: ) Entering state 4 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.7: 1) + $1 = token "number" (1.7: 1) -> $$ = nterm exp (1.7: 1) Entering state 12 Reading a token @@ -231638,11 +226713,11 @@ Shifting token '+' (1.9: ) Entering state 20 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11: 1) + $1 = token "number" (1.11: 1) -> $$ = nterm exp (1.11: 1) Entering state 29 Reading a token @@ -231657,11 +226732,11 @@ Shifting token '+' (1.13: ) Entering state 20 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15: 1) + $1 = token "number" (1.15: 1) -> $$ = nterm exp (1.15: 1) Entering state 29 Reading a token @@ -231747,11 +226822,11 @@ Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.33: 1) + $1 = token "number" (1.33: 1) -> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token @@ -231759,11 +226834,11 @@ Shifting token '*' (1.35: ) Entering state 21 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.37: 2) + $1 = token "number" (1.37: 2) -> $$ = nterm exp (1.37: 2) Entering state 30 Reading a token @@ -231810,11 +226885,11 @@ Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.46: 1) + $1 = token "number" (1.46: 1) -> $$ = nterm exp (1.46: 1) Entering state 27 Reading a token @@ -231840,12 +226915,99 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: + | (1 + 1) / (1 - 1) +./calc.at:1479: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -231875,11 +227037,11 @@ Shifting token '(' (1.6: ) Entering state 4 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.7: 1) + $1 = token "number" (1.7: 1) -> $$ = nterm exp (1.7: 1) Entering state 12 Reading a token @@ -231887,11 +227049,11 @@ Shifting token '+' (1.9: ) Entering state 20 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11: 1) + $1 = token "number" (1.11: 1) -> $$ = nterm exp (1.11: 1) Entering state 29 Reading a token @@ -231906,11 +227068,11 @@ Shifting token '+' (1.13: ) Entering state 20 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15: 1) + $1 = token "number" (1.15: 1) -> $$ = nterm exp (1.15: 1) Entering state 29 Reading a token @@ -231996,11 +227158,11 @@ Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.33: 1) + $1 = token "number" (1.33: 1) -> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token @@ -232008,11 +227170,11 @@ Shifting token '*' (1.35: ) Entering state 21 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.37: 2) + $1 = token "number" (1.37: 2) -> $$ = nterm exp (1.37: 2) Entering state 30 Reading a token @@ -232059,11 +227221,11 @@ Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.46: 1) + $1 = token "number" (1.46: 1) -> $$ = nterm exp (1.46: 1) Entering state 27 Reading a token @@ -232089,10 +227251,27 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1486: cat stderr +./torture.at:510: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +error: null divisor +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -232103,10 +227282,513 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +error: null divisor +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () + | (1 + 1) / (1 - 1) +./calc.at:1486: $PREPARSER ./calc input ./calc.at:1492: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () input: | (!!) + (1 2) = 1 ./calc.at:1492: $PREPARSER ./calc input +./calc.at:1489: cat stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./torture.at:548: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -232146,21 +227828,21 @@ Shifting token '(' (1.8: ) Entering state 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 1) + $1 = token "number" (1.9: 1) -> $$ = nterm exp (1.9: 1) Entering state 12 Reading a token -Next token is token number (1.11: 2) +Next token is token "number" (1.11: 2) 1.11: syntax error, unexpected number Error: popping nterm exp (1.9: 1) Shifting token error (1.9-11: ) Entering state 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token Next token is token ')' (1.12: ) Entering state 11 @@ -232185,11 +227867,11 @@ Shifting token '=' (1.14: ) Entering state 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) + $1 = token "number" (1.16: 1) -> $$ = nterm exp (1.16: 1) Entering state 27 Reading a token @@ -232215,11 +227897,17 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +564. calc.at:1489: ok +./calc.at:1486: cat stderr +./calc.at:1479: cat stderr ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +561. calc.at:1486: ok +553. calc.at:1479: ok + stderr: Starting parse Entering state 0 @@ -232259,21 +227947,21 @@ Shifting token '(' (1.8: ) Entering state 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 1) + $1 = token "number" (1.9: 1) -> $$ = nterm exp (1.9: 1) Entering state 12 Reading a token -Next token is token number (1.11: 2) +Next token is token "number" (1.11: 2) 1.11: syntax error, unexpected number Error: popping nterm exp (1.9: 1) Shifting token error (1.9-11: ) Entering state 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token Next token is token ')' (1.12: ) Entering state 11 @@ -232298,11 +227986,11 @@ Shifting token '=' (1.14: ) Entering state 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) + $1 = token "number" (1.16: 1) -> $$ = nterm exp (1.16: 1) Entering state 27 Reading a token @@ -232328,9 +228016,9 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -232342,6 +228030,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + + ./calc.at:1492: cat stderr input: | (- *) + (1 2) = 1 @@ -232390,21 +228080,21 @@ Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.10: 1) + $1 = token "number" (1.10: 1) -> $$ = nterm exp (1.10: 1) Entering state 12 Reading a token -Next token is token number (1.12: 2) +Next token is token "number" (1.12: 2) 1.12: syntax error, unexpected number Error: popping nterm exp (1.10: 1) Shifting token error (1.10-12: ) Entering state 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Reading a token Next token is token ')' (1.13: ) Entering state 11 @@ -232429,11 +228119,11 @@ Shifting token '=' (1.15: ) Entering state 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.17: 1) + $1 = token "number" (1.17: 1) -> $$ = nterm exp (1.17: 1) Entering state 27 Reading a token @@ -232459,9 +228149,9 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: @@ -232508,21 +228198,21 @@ Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.10: 1) + $1 = token "number" (1.10: 1) -> $$ = nterm exp (1.10: 1) Entering state 12 Reading a token -Next token is token number (1.12: 2) +Next token is token "number" (1.12: 2) 1.12: syntax error, unexpected number Error: popping nterm exp (1.10: 1) Shifting token error (1.10-12: ) Entering state 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Reading a token Next token is token ')' (1.13: ) Entering state 11 @@ -232547,11 +228237,11 @@ Shifting token '=' (1.15: ) Entering state 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.17: 1) + $1 = token "number" (1.17: 1) -> $$ = nterm exp (1.17: 1) Entering state 27 Reading a token @@ -232577,9 +228267,9 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -232591,6 +228281,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +624. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: Canonical LR(1) ... +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none ./calc.at:1492: cat stderr input: | (* *) + (*) + (*) @@ -232707,12 +228400,138 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] +input.y:292.13: error: empty rule without %empty [-Werror=empty-rule] +input.y:309.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:382.14: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other] +input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence] +input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity 
for DIAMETER, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence] +input.y:176.1-5: error: useless associativity for OF, use %precedence [-Werror=precedence] +input.y:176.1-5: error: useless associativity for BETWEEN, use %precedence [-Werror=precedence] +input.y:177.1-5: error: useless associativity for AND, use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence] 
+input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for START, use %precedence [-Werror=precedence] +input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence] +input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence] +input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence] +input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] +input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +625. regression.at:25: testing Trivial grammars ... 
+./regression.at:43: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 78 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: error: 10 reduce/reduce conflicts [-Werror=conflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] +input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] +input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] +input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] +input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] +input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +stderr: +./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 Starting parse Entering state 0 Reading a token @@ -232824,10 +228643,11 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./existing.at:808: sed 's,.*/$,,' stderr 1>&2 ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -232838,19 +228658,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +626. regression.at:55: testing YYSTYPE typedef ... ./calc.at:1492: cat stderr +./regression.at:73: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y input: | 1 + 2 * 3 + !+ ++ ./calc.at:1492: $PREPARSER ./calc input +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token @@ -232858,11 +228683,11 @@ Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token @@ -232870,11 +228695,11 @@ Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token @@ -232909,15 +228734,16 @@ Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:44: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token @@ -232925,11 +228751,11 @@ Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token number 
(1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token @@ -232937,11 +228763,11 @@ Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token @@ -232979,14 +228805,104 @@ | 1 + 2 * 3 + !- ++ ./calc.at:1492: $PREPARSER ./calc input stderr: +input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] +input.y:292.13: error: empty rule without %empty [-Werror=empty-rule] +input.y:309.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:382.14: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other] +input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence] +input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity 
for DASHED, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence] +input.y:176.1-5: error: useless associativity for OF, use %precedence [-Werror=precedence] +input.y:176.1-5: error: useless associativity for BETWEEN, use %precedence [-Werror=precedence] +input.y:177.1-5: error: useless associativity for AND, use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence 
[-Werror=precedence] +input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for START, use %precedence [-Werror=precedence] +input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence] +input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence] +input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence] +input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] +input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] +./regression.at:74: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token @@ -232994,11 +228910,11 @@ Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token @@ -233006,11 +228922,11 @@ Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token @@ -233045,15 +228961,49 @@ Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 +stderr: +input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 78 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: error: 10 reduce/reduce conflicts [-Werror=conflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] +input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] +input.y:54.1-5: error: useless associativity for HORELSE, use 
%precedence [-Werror=precedence] +input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] +input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] +input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token @@ -233061,11 +229011,11 @@ Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token @@ -233073,11 +229023,11 @@ Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token @@ -233111,6 +229061,8 @@ $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./existing.at:808: sed 's,.*/$,,' stderr 1>&2 ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -233121,11 +229073,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +stderr: +stdout: +stderr: ./calc.at:1492: cat stderr +stdout: +626. 
regression.at:55: stderr: + ok +stdout: +./torture.at:551: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 +./torture.at:513: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 +stderr: + +stdout: +./regression.at:45: $CC $CFLAGS $CPPFLAGS -c -o input.o -DYYDEBUG -c input.c +stderr: +stderr: +./torture.at:551: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./torture.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +627. regression.at:85: testing Early token definitions with --yacc ... +./regression.at:115: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --yacc -o input.c input.y input: | (#) + (#) = 2222 ./calc.at:1492: $PREPARSER ./calc input stderr: +stderr: +./torture.at:515: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 +./torture.at:553: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -233190,11 +229169,11 @@ Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13-16: 2222) + $1 = token "number" (1.13-16: 2222) -> $$ = nterm exp (1.13-16: 2222) Entering state 27 Reading a token @@ -233219,11 +229198,48 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +./torture.at:515: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./calc.at:1494: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc calc.hh + +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +./regression.at:116: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +stderr: +./existing.at:74: sed -n 's/^State //p' input.output | tail -1 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./torture.at:553: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1494: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -233289,11 +229305,11 @@ Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13-16: 2222) + $1 = token "number" (1.13-16: 2222) -> $$ = nterm exp (1.13-16: 2222) Entering state 27 Reading a token @@ -233318,114 +229334,94 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +./torture.at:517: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 stderr: -stdout: -./torture.at:497: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./torture.at:497: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: cat stderr -stderr: -stderr: -./torture.at:500: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 -stdout: -input: -./calc.at:1491: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc calc.hh - -stderr: - | (1 + #) = 1111 -./calc.at:1492: $PREPARSER ./calc input -./torture.at:500: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +memory exhausted +memory exhausted stderr: +./torture.at:555: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 +./existing.at:74: sed -n 's/^State //p' input.output | tail -1 +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y stderr: -./torture.at:504: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.14-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 
1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -233433,136 +229429,774 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -input: -memory exhausted -memory exhausted -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./torture.at:504: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -stderr: -stderr: -./calc.at:1491: $PREPARSER ./calc input -Starting parse -Entering state 0 +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Reading a token +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Next 
token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token 
"number" (5.3: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" 
(7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) Entering state 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) Entering state 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Reading a token +Next token is 
token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Next token is token '\n' (9.15-10.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Next token is token '=' (10.13: ) 
+Shifting token '=' (10.13: ) +Entering state 18 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ 
= nterm input (1.1-13.0: ) +Entering state 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (14.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -memory exhausted -memory exhausted -./torture.at:510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1492: $PREPARSER ./calc input +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./torture.at:517: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token @@ -233570,11 +230204,11 @@ Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) + $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token @@ -233582,11 +230216,11 @@ Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) + $1 = token "number" (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token @@ -233608,11 +230242,11 @@ Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13: 7) + $1 = token "number" (1.13: 7) -> $$ = nterm exp (1.13: 7) Entering state 27 Reading a token @@ -233636,11 +230270,11 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.1: 1) + $1 = token "number" (2.1: 1) -> $$ = nterm exp (2.1: 1) Entering state 8 Reading a token @@ -233648,11 +230282,11 @@ Shifting token '+' (2.3: ) Entering state 20 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.5: 2) + $1 = token "number" (2.5: 2) -> $$ = nterm exp (2.5: 2) Entering state 29 Reading a token @@ -233664,11 +230298,11 @@ Shifting token '-' (2.9: ) Entering state 2 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.10: 3) + $1 = token "number" (2.10: 3) -> $$ = nterm exp (2.10: 3) Entering state 10 Reading a token @@ -233700,11 +230334,11 @@ Shifting token '-' (2.14: ) Entering state 2 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) +Next token is token "number" (2.15: 5) +Shifting 
token "number" (2.15: 5) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2.15: 5) + $1 = token "number" (2.15: 5) -> $$ = nterm exp (2.15: 5) Entering state 10 Reading a token @@ -233752,11 +230386,11 @@ Shifting token '-' (4.1: ) Entering state 2 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.2: 1) + $1 = token "number" (4.2: 1) -> $$ = nterm exp (4.2: 1) Entering state 10 Reading a token @@ -233764,11 +230398,11 @@ Shifting token '^' (4.3: ) Entering state 23 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.4: 2) + $1 = token "number" (4.4: 2) -> $$ = nterm exp (4.4: 2) Entering state 32 Reading a token @@ -233793,11 +230427,11 @@ Shifting token '-' (4.8: ) Entering state 2 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (4.9: 1) + $1 = token "number" (4.9: 1) -> $$ = nterm exp (4.9: 1) Entering state 10 Reading a token @@ -233836,11 +230470,11 @@ Shifting token '-' (5.2: ) Entering state 2 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.3: 1) + $1 = token "number" (5.3: 1) -> $$ = nterm exp (5.3: 1) Entering state 10 Reading a token @@ -233864,11 +230498,11 @@ Shifting token '^' (5.5: ) Entering state 23 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.6: 2) + $1 = token "number" (5.6: 2) -> $$ = nterm exp (5.6: 2) Entering state 32 Reading a token @@ -233883,11 +230517,11 @@ Shifting token '=' (5.8: ) Entering state 18 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (5.10: 1) + $1 = token "number" (5.10: 1) -> $$ = nterm exp (5.10: 1) Entering state 27 Reading a token @@ -233937,11 +230571,11 @@ Shifting token '-' (7.3: ) Entering state 2 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.4: 1) + $1 = token "number" (7.4: 1) -> $$ = nterm exp (7.4: 1) Entering state 10 Reading a token @@ -233971,11 +230605,11 @@ Shifting token '-' (7.8: ) Entering state 2 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (7.9: 1) + $1 = token "number" (7.9: 1) -> $$ = nterm exp (7.9: 1) Entering state 10 Reading a token @@ -234019,11 +230653,11 @@ -> $$ = nterm input (1.1-9.0: ) Entering state 6 Reading a token 
-Next token is token number (9.1: 1) -Shifting token number (9.1: 1) +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.1: 1) + $1 = token "number" (9.1: 1) -> $$ = nterm exp (9.1: 1) Entering state 8 Reading a token @@ -234031,11 +230665,11 @@ Shifting token '-' (9.3: ) Entering state 19 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.5: 2) + $1 = token "number" (9.5: 2) -> $$ = nterm exp (9.5: 2) Entering state 28 Reading a token @@ -234050,11 +230684,11 @@ Shifting token '-' (9.7: ) Entering state 19 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.9: 3) + $1 = token "number" (9.9: 3) -> $$ = nterm exp (9.9: 3) Entering state 28 Reading a token @@ -234073,11 +230707,11 @@ Shifting token '-' (9.13: ) Entering state 2 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (9.14: 4) + $1 = token "number" (9.14: 4) -> $$ = nterm exp (9.14: 4) Entering state 10 Reading a token @@ -234108,11 +230742,11 @@ -> $$ = nterm input (1.1-10.0: ) Entering state 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.1: 1) + $1 = token "number" (10.1: 1) -> $$ = nterm exp (10.1: 1) Entering state 8 Reading a token @@ -234124,11 +230758,11 @@ Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.6: 2) + $1 = token "number" (10.6: 2) -> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token @@ -234136,11 +230770,11 @@ Shifting token '-' (10.8: ) Entering state 19 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.10: 3) + $1 = token "number" (10.10: 3) -> $$ = nterm exp (10.10: 3) Entering state 28 Reading a token @@ -234172,11 +230806,11 @@ Shifting token '=' (10.13: ) Entering state 18 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (10.15: 2) + $1 = token "number" (10.15: 2) -> $$ = nterm exp (10.15: 2) Entering state 27 Reading a token @@ -234214,11 +230848,11 @@ -> $$ = nterm input (1.1-12.0: ) Entering state 6 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.1: 2) + $1 = 
token "number" (12.1: 2) -> $$ = nterm exp (12.1: 2) Entering state 8 Reading a token @@ -234226,11 +230860,11 @@ Shifting token '^' (12.2: ) Entering state 23 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.3: 2) + $1 = token "number" (12.3: 2) -> $$ = nterm exp (12.3: 2) Entering state 32 Reading a token @@ -234238,11 +230872,11 @@ Shifting token '^' (12.4: ) Entering state 23 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.5: 3) + $1 = token "number" (12.5: 3) -> $$ = nterm exp (12.5: 3) Entering state 32 Reading a token @@ -234264,11 +230898,11 @@ Shifting token '=' (12.7: ) Entering state 18 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (12.9-11: 256) + $1 = token "number" (12.9-11: 256) -> $$ = nterm exp (12.9-11: 256) Entering state 27 Reading a token @@ -234297,11 +230931,11 @@ Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.2: 2) + $1 = token "number" (13.2: 2) -> $$ = nterm exp (13.2: 2) Entering state 12 Reading a token @@ -234309,11 +230943,11 @@ Shifting token '^' (13.3: ) Entering state 23 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.4: 2) + $1 = token "number" (13.4: 2) -> $$ = nterm exp (13.4: 2) Entering state 32 Reading a token @@ -234338,11 +230972,11 @@ Shifting token '^' (13.6: ) Entering state 23 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.7: 3) + $1 = token "number" (13.7: 3) -> $$ = nterm exp (13.7: 3) Entering state 32 Reading a token @@ -234357,11 +230991,11 @@ Shifting token '=' (13.9: ) Entering state 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.11-12: 64) + $1 = token "number" (13.11-12: 64) -> $$ = nterm exp (13.11-12: 64) Entering state 27 Reading a token @@ -234387,82 +231021,91 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (14.1: ) +Shifting token "end of input" (14.1: ) Entering state 16 -Cleanup: popping token end of input (14.1: ) +Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +memory exhausted +memory exhausted +memory exhausted +memory exhausted +614. 
torture.at:485: ok +./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y + +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +stderr: +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stdout: +./torture.at:555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +input: + | 1 2 +./calc.at:1494: $PREPARSER ./calc input +stderr: +memory exhausted +memory exhausted +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1491: $PREPARSER ./calc input +615. torture.at:531: ok + +628. regression.at:127: testing Early token definitions without --yacc ... +./regression.at:161: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./calc.at:1492: cat stderr +./torture.at:140: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./torture.at:237: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '\n' (1.15-2.0: ) 
-Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +stderr: +stdout: +627. regression.at:85: ok + stderr: Starting parse Entering state 0 @@ -235300,108 +231943,41 @@ Entering state 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 2 -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | (1 + #) = 1111 +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: $PREPARSER ./calc input +./regression.at:162: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +629. regression.at:173: testing Braces parsing ... +./regression.at:185: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y +stderr: +stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token number (1.3: 2) +Next token is token "number" (1.3: 2) 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token number (1.3: 2) -./calc.at:1492: cat stderr -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: discarding lookahead token "number" (1.3: 2) +input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr] +input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +stdout: +./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' stderr: +input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr] +input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +stdout: +./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' Starting parse Entering state 0 Reading a token @@ -235413,181 +231989,69 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token number (1.3: 2) -input: - | (1 + # + 1) = 1111 -./calc.at:1492: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next 
token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1491: cat stderr -Starting parse -Entering state 0 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Reading a token 
-Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.14-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -235595,1508 +232059,766 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./torture.at:510: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -input: - | 1//2 -./calc.at:1491: $PREPARSER ./calc input -stderr: -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -input: - | (1 + 1) / (1 - 1) -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1491: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 20 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 29 Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) 
-Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -Starting parse -Entering state 0 +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) Entering state 29 -Reading a token -Next token is token ')' (1.7: ) +Next token is token '=' (2.12: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next 
token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | error -./calc.at:1491: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1492: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -568. calc.at:1492: ./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - ok -./calc.at:1491: cat stderr -input: - | 1 = 2 = 3 - -./calc.at:1491: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: cat stderr -input: - | - | +1 -./calc.at:1491: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: cat stderr -./calc.at:1491: $PREPARSER ./calc /dev/null -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -615. torture.at:531: testing Exploding the Stack Size with Malloc ... 
-./torture.at:535: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./calc.at:1491: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1491: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) Entering state 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Next token is token '\n' (4.10-5.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) Entering state 6 Reading a token -Now at end of input. 
-Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is 
token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 
Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) Entering state 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Next token is token '\n' (7.10-8.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (7.10-8.0: ) +Shifting 
token '\n' (7.10-8.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: cat stderr -./torture.at:535: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -input: - | (!!) + (1 2) = 1 -./calc.at:1491: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 
2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) Entering state 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Next token is token '\n' (9.15-10.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' 
(1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) Entering state 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (10.15: 2) +-> $$ = nterm exp 
(10.15: 2) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (10.16-11.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: cat stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1491: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) Entering state 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (12.9-11: 
256) +Shifting token number (12.9-11: 256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (12.12-13.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = 
token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (13.13-14.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token end of input (14.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +630. regression.at:196: testing Rule Line Numbers ... +./regression.at:232: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -v input.y +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./regression.at:187: grep 'tests = {{{{{{{{{{}}}}}}}}}};' input.c +./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -237106,11 +232828,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1491: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1491: $PREPARSER ./calc input +stdout: + { tests = {{{{{{{{{{}}}}}}}}}}; } +629. regression.at:173: ok +./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1494: cat stderr stderr: +./regression.at:235: cat input.output Starting parse Entering state 0 Reading a token @@ -237118,102 +232842,64 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -237222,11 +232908,79 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + +stderr: +stdout: +625. 
regression.at:25: ok +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none + +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +input: +./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + | 1 2 +./calc.at:1491: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token number (1.3: 2) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 265 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] +input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] +input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] +input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] +input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] +input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] +input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] +input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y:47.1-6: error: 
useless associativity for UNARY, use %precedence [-Werror=precedence] +input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +630. regression.at:196: ok +631. regression.at:345: testing Mixing %token styles ... +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -Wall -o input.c input.y + +input: ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:74: sed 's,.*/$,,' stderr 1>&2 + | 1//2 +./calc.at:1494: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -237235,102 +232989,64 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -237339,24 +233055,13 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1491: $PREPARSER ./calc input +632. regression.at:437: testing Token definitions: parse.error=detailed ... +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y +stderr: stderr: Starting parse Entering state 0 @@ -237369,200 +233074,84 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token number (1.3: 2) Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1491: $PREPARSER ./calc input +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +stderr: +stdout: +628. regression.at:127: ok +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y -Werror + +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./calc.at:1492: cat stderr +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) + $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror stderr: stdout: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: "$PERL" -ne ' +./calc.at:1492: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -237573,83 +233162,9 @@ || /\t/ )' calc.cc calc.hh -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 input: +633. regression.at:438: testing Token definitions: parse.error=verbose ... +./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -237663,11 +233178,22 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1491: cat stderr -./calc.at:1494: $PREPARSER ./calc input +./calc.at:1492: $PREPARSER ./calc input input: - | (#) + (#) = 2222 -./calc.at:1491: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1491: cat stderr +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: stderr: Starting parse Entering state 0 @@ -237683,178 +233209,49 @@ Next token is token error (1.2: ) Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) Reading a token -Next token is token ')' (1.3: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 
69): @@ -237863,18 +233260,27 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +input.y:3.1-5: error: useless precedence and associativity for "||" [-Werror=precedence] +input.y:3.1-5: error: useless precedence and associativity for "<=" [-Werror=precedence] +stderr: +./regression.at:357: sed 's,.*/$,,' stderr 1>&2 +input: +./calc.at:1494: cat stderr + | 1//2 +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token @@ -237882,11 +233288,11 @@ Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token @@ -237894,11 +233300,11 @@ Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token @@ -237920,11 +233326,11 @@ Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13: 7) + $1 = token number (1.13: 7) -> $$ = nterm exp (1.13: 7) Entering state 27 Reading a token @@ -237948,11 +233354,11 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.1: 1) + $1 = token number (2.1: 1) -> $$ = nterm exp (2.1: 1) Entering state 8 Reading a token @@ -237960,11 +233366,11 @@ Shifting token '+' (2.3: ) Entering state 20 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.5: 2) + $1 = token number (2.5: 2) -> $$ = nterm exp (2.5: 2) Entering state 29 Reading a token @@ -237976,11 +233382,11 @@ Shifting token '-' (2.9: ) Entering state 2 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.10: 3) + $1 = token number (2.10: 3) -> $$ = nterm exp (2.10: 3) Entering state 10 Reading a token @@ 
-238012,11 +233418,11 @@ Shifting token '-' (2.14: ) Entering state 2 Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.15: 5) + $1 = token number (2.15: 5) -> $$ = nterm exp (2.15: 5) Entering state 10 Reading a token @@ -238064,11 +233470,11 @@ Shifting token '-' (4.1: ) Entering state 2 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.2: 1) + $1 = token number (4.2: 1) -> $$ = nterm exp (4.2: 1) Entering state 10 Reading a token @@ -238076,11 +233482,11 @@ Shifting token '^' (4.3: ) Entering state 23 Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.4: 2) + $1 = token number (4.4: 2) -> $$ = nterm exp (4.4: 2) Entering state 32 Reading a token @@ -238105,11 +233511,11 @@ Shifting token '-' (4.8: ) Entering state 2 Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.9: 1) + $1 = token number (4.9: 1) -> $$ = nterm exp (4.9: 1) Entering state 10 Reading a token @@ -238148,11 +233554,11 @@ Shifting token '-' (5.2: ) Entering state 2 Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.3: 1) + $1 = token number (5.3: 1) -> $$ = nterm exp (5.3: 1) Entering state 10 Reading a token @@ -238176,11 +233582,11 @@ Shifting token '^' (5.5: ) Entering state 23 Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.6: 2) + $1 = token number (5.6: 2) -> $$ = nterm exp (5.6: 2) Entering state 32 Reading a token @@ -238195,11 +233601,11 @@ Shifting token '=' (5.8: ) Entering state 18 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.10: 1) + $1 = token number (5.10: 1) -> $$ = nterm exp (5.10: 1) Entering state 27 Reading a token @@ -238249,11 +233655,11 @@ Shifting token '-' (7.3: ) Entering state 2 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.4: 1) + $1 = token number (7.4: 1) -> $$ = nterm exp (7.4: 1) Entering state 10 Reading a token @@ -238283,11 +233689,11 @@ Shifting token '-' (7.8: ) Entering state 2 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = 
token "number" (7.9: 1) + $1 = token number (7.9: 1) -> $$ = nterm exp (7.9: 1) Entering state 10 Reading a token @@ -238331,11 +233737,11 @@ -> $$ = nterm input (1.1-9.0: ) Entering state 6 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.1: 1) + $1 = token number (9.1: 1) -> $$ = nterm exp (9.1: 1) Entering state 8 Reading a token @@ -238343,11 +233749,11 @@ Shifting token '-' (9.3: ) Entering state 19 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.5: 2) + $1 = token number (9.5: 2) -> $$ = nterm exp (9.5: 2) Entering state 28 Reading a token @@ -238362,11 +233768,11 @@ Shifting token '-' (9.7: ) Entering state 19 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.9: 3) + $1 = token number (9.9: 3) -> $$ = nterm exp (9.9: 3) Entering state 28 Reading a token @@ -238385,11 +233791,11 @@ Shifting token '-' (9.13: ) Entering state 2 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.14: 4) + $1 = token number (9.14: 4) -> $$ = nterm exp (9.14: 4) Entering state 10 Reading a token @@ -238420,11 +233826,11 @@ -> $$ = nterm input (1.1-10.0: ) Entering state 6 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.1: 1) + $1 = token number (10.1: 1) -> $$ = nterm exp (10.1: 1) Entering state 8 Reading a token @@ -238436,11 +233842,11 @@ Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.6: 2) + $1 = token number (10.6: 2) -> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token @@ -238448,11 +233854,11 @@ Shifting token '-' (10.8: ) Entering state 19 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.10: 3) + $1 = token number (10.10: 3) -> $$ = nterm exp (10.10: 3) Entering state 28 Reading a token @@ -238484,11 +233890,11 @@ Shifting token '=' (10.13: ) Entering state 18 Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.15: 2) + $1 = token number (10.15: 2) -> $$ = nterm exp (10.15: 2) Entering state 27 Reading a token @@ -238526,11 +233932,11 @@ -> $$ = nterm input (1.1-12.0: ) Entering state 6 Reading a token -Next token is token "number" 
(12.1: 2) -Shifting token "number" (12.1: 2) +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.1: 2) + $1 = token number (12.1: 2) -> $$ = nterm exp (12.1: 2) Entering state 8 Reading a token @@ -238538,11 +233944,11 @@ Shifting token '^' (12.2: ) Entering state 23 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.3: 2) + $1 = token number (12.3: 2) -> $$ = nterm exp (12.3: 2) Entering state 32 Reading a token @@ -238550,11 +233956,11 @@ Shifting token '^' (12.4: ) Entering state 23 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.5: 3) + $1 = token number (12.5: 3) -> $$ = nterm exp (12.5: 3) Entering state 32 Reading a token @@ -238576,11 +233982,11 @@ Shifting token '=' (12.7: ) Entering state 18 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.9-11: 256) + $1 = token number (12.9-11: 256) -> $$ = nterm exp (12.9-11: 256) Entering state 27 Reading a token @@ -238609,11 +234015,11 @@ Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.2: 2) + $1 = token number (13.2: 2) -> $$ = nterm exp (13.2: 2) Entering state 12 Reading a token @@ -238621,11 +234027,11 @@ Shifting token '^' (13.3: ) Entering state 23 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.4: 2) + $1 = token number (13.4: 2) -> $$ = nterm exp (13.4: 2) Entering state 32 Reading a token @@ -238650,11 +234056,11 @@ Shifting token '^' (13.6: ) Entering state 23 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.7: 3) + $1 = token number (13.7: 3) -> $$ = nterm exp (13.7: 3) Entering state 32 Reading a token @@ -238669,11 +234075,11 @@ Shifting token '=' (13.9: ) Entering state 18 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.11-12: 64) + $1 = token number (13.11-12: 64) -> $$ = nterm exp (13.11-12: 64) Entering state 27 Reading a token @@ -238699,23 +234105,128 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" (14.1: ) +Shifting token end of input (14.1: ) Entering state 16 -Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -stdout: -./torture.at:513: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +634. regression.at:447: testing Characters Escapes ... 
+./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=error +input: + | error +./calc.at:1494: $PREPARSER ./calc input +input.y:26.8-14: error: symbol SPECIAL redeclared [-Werror=other] + 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~ +input.y:25.8-14: note: previous declaration + 25 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~ +input.y:26.16-63: error: symbol "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" used more than once as a literal string [-Werror=other] + 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +stderr: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token @@ -238723,11 +234234,11 @@ Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token @@ -238735,11 +234246,11 @@ Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token @@ -238761,11 +234272,11 @@ Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13: 7) + $1 = token number (1.13: 7) -> $$ = nterm exp (1.13: 7) Entering state 27 Reading a token @@ -238789,11 +234300,11 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.1: 1) + $1 = token number (2.1: 1) -> $$ = nterm exp (2.1: 1) Entering state 8 Reading a token @@ -238801,11 +234312,11 @@ Shifting token '+' (2.3: ) Entering state 20 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.5: 2) + $1 = token number (2.5: 2) -> $$ = nterm exp (2.5: 2) Entering state 29 Reading a token @@ -238817,11 +234328,11 @@ Shifting token 
'-' (2.9: ) Entering state 2 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.10: 3) + $1 = token number (2.10: 3) -> $$ = nterm exp (2.10: 3) Entering state 10 Reading a token @@ -238853,11 +234364,11 @@ Shifting token '-' (2.14: ) Entering state 2 Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.15: 5) + $1 = token number (2.15: 5) -> $$ = nterm exp (2.15: 5) Entering state 10 Reading a token @@ -238905,11 +234416,11 @@ Shifting token '-' (4.1: ) Entering state 2 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.2: 1) + $1 = token number (4.2: 1) -> $$ = nterm exp (4.2: 1) Entering state 10 Reading a token @@ -238917,11 +234428,11 @@ Shifting token '^' (4.3: ) Entering state 23 Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.4: 2) + $1 = token number (4.4: 2) -> $$ = nterm exp (4.4: 2) Entering state 32 Reading a token @@ -238946,11 +234457,11 @@ Shifting token '-' (4.8: ) Entering state 2 Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.9: 1) + $1 = token number (4.9: 1) -> $$ = nterm exp (4.9: 1) Entering state 10 Reading a token @@ -238989,11 +234500,11 @@ Shifting token '-' (5.2: ) Entering state 2 Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.3: 1) + $1 = token number (5.3: 1) -> $$ = nterm exp (5.3: 1) Entering state 10 Reading a token @@ -239007,18 +234518,7 @@ Shifting token ')' (5.4: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./torture.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -5.1: ) + $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) -> $$ = nterm exp (5.1-4: -1) @@ -239028,11 +234528,11 @@ Shifting token '^' (5.5: ) Entering state 23 Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.6: 2) + $1 = token number (5.6: 2) -> $$ = nterm exp (5.6: 2) Entering state 32 Reading a token @@ -239047,11 +234547,11 @@ Shifting token '=' (5.8: ) Entering state 18 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.10: 1) + $1 = token number (5.10: 1) -> $$ = nterm exp (5.10: 1) Entering state 27 Reading a token @@ -239101,11 +234601,11 @@ Shifting token '-' (7.3: ) Entering state 2 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.4: 1) + $1 = token number (7.4: 1) -> $$ = nterm exp (7.4: 1) Entering state 10 Reading a token @@ -239135,11 +234635,11 @@ Shifting token '-' (7.8: ) Entering state 2 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.9: 1) + $1 = token number (7.9: 1) -> $$ = nterm exp (7.9: 1) Entering state 10 Reading a token @@ -239183,11 +234683,11 @@ -> $$ = nterm input (1.1-9.0: ) Entering state 6 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.1: 1) + $1 = token number (9.1: 1) -> $$ = nterm exp (9.1: 1) Entering state 8 Reading a token @@ -239195,11 +234695,11 @@ Shifting token '-' (9.3: ) Entering state 19 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.5: 2) + $1 = token number (9.5: 2) -> $$ = nterm exp (9.5: 2) Entering state 28 Reading a token @@ -239214,11 +234714,11 @@ Shifting token '-' (9.7: ) Entering state 19 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.9: 3) + $1 = token number (9.9: 3) -> $$ = nterm exp (9.9: 3) Entering state 28 Reading a token @@ -239237,11 +234737,11 @@ Shifting token '-' (9.13: ) Entering state 2 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.14: 4) + $1 = token number (9.14: 4) -> $$ = 
nterm exp (9.14: 4) Entering state 10 Reading a token @@ -239272,11 +234772,11 @@ -> $$ = nterm input (1.1-10.0: ) Entering state 6 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.1: 1) + $1 = token number (10.1: 1) -> $$ = nterm exp (10.1: 1) Entering state 8 Reading a token @@ -239288,11 +234788,11 @@ Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.6: 2) + $1 = token number (10.6: 2) -> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token @@ -239300,11 +234800,11 @@ Shifting token '-' (10.8: ) Entering state 19 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.10: 3) + $1 = token number (10.10: 3) -> $$ = nterm exp (10.10: 3) Entering state 28 Reading a token @@ -239336,11 +234836,11 @@ Shifting token '=' (10.13: ) Entering state 18 Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.15: 2) + $1 = token number (10.15: 2) -> $$ = nterm exp (10.15: 2) Entering state 27 Reading a token @@ -239378,11 +234878,11 @@ -> $$ = nterm input (1.1-12.0: ) Entering state 6 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.1: 2) + $1 = token number (12.1: 2) -> $$ = nterm exp (12.1: 2) Entering state 8 Reading a token @@ -239390,11 +234890,11 @@ Shifting token '^' (12.2: ) Entering state 23 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.3: 2) + $1 = token number (12.3: 2) -> $$ = nterm exp (12.3: 2) Entering state 32 Reading a token @@ -239402,11 +234902,11 @@ Shifting token '^' (12.4: ) Entering state 23 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.5: 3) + $1 = token number (12.5: 3) -> $$ = nterm exp (12.5: 3) Entering state 32 Reading a token @@ -239428,11 +234928,11 @@ Shifting token '=' (12.7: ) Entering state 18 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.9-11: 256) + $1 = token number (12.9-11: 256) -> $$ = nterm exp (12.9-11: 256) Entering state 27 Reading a token @@ -239461,11 +234961,11 @@ Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token "number" (13.2: 
2) -Shifting token "number" (13.2: 2) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.2: 2) + $1 = token number (13.2: 2) -> $$ = nterm exp (13.2: 2) Entering state 12 Reading a token @@ -239473,11 +234973,11 @@ Shifting token '^' (13.3: ) Entering state 23 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.4: 2) + $1 = token number (13.4: 2) -> $$ = nterm exp (13.4: 2) Entering state 32 Reading a token @@ -239502,11 +235002,11 @@ Shifting token '^' (13.6: ) Entering state 23 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.7: 3) + $1 = token number (13.7: 3) -> $$ = nterm exp (13.7: 3) Entering state 32 Reading a token @@ -239521,11 +235021,11 @@ Shifting token '=' (13.9: ) Entering state 18 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.11-12: 64) + $1 = token number (13.11-12: 64) -> $$ = nterm exp (13.11-12: 64) Entering state 27 Reading a token @@ -239551,59 +235051,95 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (14.1: ) +Shifting token end of input (14.1: ) Entering state 16 -Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -input: -stderr: - | 1 2 -./torture.at:515: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 -./calc.at:1494: $PREPARSER ./calc input -./calc.at:1491: cat stderr +./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror +./regression.at:437: sed 's,.*/$,,' stderr 1>&2 +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: stderr: -./torture.at:515: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1492: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -input: +Cleanup: discarding lookahead token '/' (1.3: ) +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 2 stderr: - | (1 + #) = 1111 -./calc.at:1491: $PREPARSER ./calc input -./torture.at:517: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 +./calc.at:1492: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./regression.at:466: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./calc.at:1492: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) stderr: +./calc.at:1491: cat stderr Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token "number" (1.3: 2) +Next token is token number (1.3: 2) 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -stderr: -memory exhausted -memory exhausted +Cleanup: discarding lookahead token number (1.3: 2) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -239612,11 +235148,11 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) + $1 = token "number" (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token @@ -239633,43 +235169,49 @@ Next token is token error (1.6: ) Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = 
token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -239678,12 +235220,13 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./torture.at:517: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -239694,22 +235237,44 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token number (1.3: 2) + | error +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1494: cat stderr stderr: -memory exhausted -memory exhausted stderr: Starting parse Entering state 0 Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Starting parse +Entering state 0 +Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) + $1 = token "number" (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token @@ -239726,43 +235291,49 @@ Next token is token error (1.6: ) Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next 
token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -239771,13 +235342,12 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -614. torture.at:485: ./calc.at:1494: cat stderr - ok -./calc.at:1491: "$PERL" -pi -e 'use strict; +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -239788,9 +235358,17 @@ }eg ' expout || exit 77 input: - | 1//2 -./calc.at:1491: cat stderr +stderr: + | 1 = 2 = 3 ./calc.at:1494: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +./calc.at:1492: cat stderr +stderr: stderr: Starting parse Entering state 0 @@ -239803,6 +235381,60 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +input.y:26.8-14: error: symbol SPECIAL redeclared [-Werror=other] + 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~ +input.y:25.8-14: note: previous declaration + 25 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~ +input.y:26.16-63: error: symbol "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" used more than once as a literal string [-Werror=other] + 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input: +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1//2 +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y -Wnone,none -Werror --trace=none +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token Next token is token '/' (1.2: ) Shifting token '/' (1.2: ) Entering state 22 @@ -239812,12 +235444,19 @@ Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.3: ) -input: - - | (# + 1) = 1111 -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: $PREPARSER ./calc input +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:438: sed 's,.*/$,,' stderr 1>&2 Starting parse Entering state 0 Reading a token @@ -239829,6 +235468,37 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +stderr: +./calc.at:1492: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token Next token is token '/' (1.2: ) Shifting token '/' (1.2: ) Entering state 22 @@ -239838,6 +235508,37 @@ Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1491: cat stderr +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error +input: + | (1 + 1) / (1 - 1) +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1494: cat stderr +stdout: +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./torture.at:238: $PREPARSER ./input +stderr: stderr: Starting parse Entering state 0 @@ -239846,56 +235547,902 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 Reading a token Next token is token ')' (1.7: ) -Entering state 11 +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> 
$$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./torture.at:238: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: cat stderr +input: +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +605. torture.at:216: ok +./calc.at:1491: $PREPARSER ./calc input +input: + | error +./calc.at:1492: $PREPARSER ./calc input +stderr: + | + | +1 +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +stderr: +./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 +./calc.at:1494: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) 
+Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=none -Werror --trace=none +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +./calc.at:1492: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +567. calc.at:1492: ./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + ok +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1494: cat stderr +./calc.at:1491: cat stderr +stderr: +./calc.at:1494: $PREPARSER ./calc /dev/null +./calc.at:1492: cat stderr +stdout: +./existing.at:74: $PREPARSER ./input +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +stderr: +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none +./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none +./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1492: $PREPARSER ./calc input +stderr: +617. existing.at:74: ok +stderr: +stdout: +stderr: +./existing.at:74: $PREPARSER ./input +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +stderr: +input: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +stdout: +stderr: +syntax error, unexpected '*', expecting NEWLINE or '{' or ';' +./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +631. regression.at:345: ok + | + | +1 +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +634. 
regression.at:447: ok +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +616. existing.at:74: ok +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) + +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: cat stderr + +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | + | +1 +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1494: cat stderr +./calc.at:1491: cat stderr +stderr: +input: +./calc.at:1491: $PREPARSER ./calc /dev/null +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:808: sed -n 's/^State //p' input.output | tail -1 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./existing.at:808: sed -n 's/^State //p' input.output | tail -1 + +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1494: $PREPARSER ./calc input +stderr: +./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -239904,11 +236451,34 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:437: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +./calc.at:1492: cat stderr +./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +635. regression.at:480: testing Web2c Report ... 
+./calc.at:1492: $PREPARSER ./calc /dev/null +./regression.at:505: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v input.y stderr: Starting parse Entering state 0 @@ -239917,56 +236487,234 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' Shifting token error (1.2: ) Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 Reading a token -Next token is token ')' (1.7: ) +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -239975,10 +236723,20 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +636. regression.at:661: testing Web2c Actions ... +./regression.at:674: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y +./calc.at:1491: cat stderr ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -239989,7 +236747,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1491: "$PERL" -pi -e 'use strict; +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1494: cat stderr +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +./regression.at:438: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +input: +637. regression.at:812: testing Useless Tokens ... 
+./regression.at:912: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -o input.c input.y +input: +./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -239999,94 +236771,113 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1491: cat stderr -./calc.at:1494: cat stderr -input: -input: - | (1 + # + 1) = 1111 - | error + | (!!) + (1 2) = 1 ./calc.at:1494: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1491: $PREPARSER ./calc input +639. regression.at:1144: testing Dancer %glr-parser ... +./regression.at:1144: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.c dancer.y +./calc.at:1492: cat stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -stderr: -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.12: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 
1 (line 69): @@ -240095,18 +236886,19 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +638. regression.at:1143: testing Dancer ... +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +640. regression.at:1145: testing Dancer lalr1.cc ... +./regression.at:1143: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.c dancer.y +./regression.at:1145: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.cc dancer.y +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -240115,70 +236907,234 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) 
+Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Reading a token -Next token is token ')' (1.11: ) +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Reading a token +Next token 
is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -240191,65 +237147,8 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: cat stderr -input: -./calc.at:1491: cat stderr - | 1 = 2 = 3 -./calc.at:1494: $PREPARSER ./calc input -input: - | (1 + 1) / (1 - 1) -./calc.at:1491: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) stderr: +./regression.at:506: cat input.output Starting parse Entering state 0 Reading a token @@ -240257,102 +237156,98 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) + $1 = token "number" (1.16: 1) -> $$ = nterm exp (1.16: 1) -Entering state 28 +Entering state 27 Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null 
divisor --> $$ = nterm exp (1.1-17: 2) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -240361,146 +237256,245 @@ Entering state 6 Reading a token Now at end of input. -Shifting token end of input (2.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input (2.1: ) +Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -stderr: -Starting parse -Entering state 0 + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) 
Entering state 20 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) Entering state 29 Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' (1.17: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -240513,6 +237507,7 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -240523,128 +237518,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: cat stderr -616. existing.at:74: testing GNU AWK 3.1.0 Grammar: LALR(1) ... -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -input: -./calc.at:1491: cat stderr - | - | +1 -./calc.at:1494: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -566. 
calc.at:1491: ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ok -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) - -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1494: cat stderr -stdout: -./torture.at:538: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 -./calc.at:1494: $PREPARSER ./calc /dev/null -stderr: -stderr: -./torture.at:538: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./torture.at:541: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./torture.at:541: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./torture.at:545: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -memory exhausted -memory exhausted -./torture.at:545: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: cat stderr -stderr: -memory exhausted -memory exhausted -./torture.at:548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1494: $PREPARSER ./calc input stderr: +./regression.at:679: cat tables.c Starting parse Entering state 0 Reading a token @@ -240674,11 +237549,11 @@ Shifting token '(' (1.6: ) Entering state 4 Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.7: 1) + $1 = token number (1.7: 1) -> $$ = nterm exp (1.7: 1) Entering state 12 Reading a token @@ -240686,11 +237561,11 @@ Shifting token '+' (1.9: ) Entering state 20 Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11: 1) + $1 = token number (1.11: 1) -> $$ = nterm exp (1.11: 1) Entering state 29 Reading a token @@ -240705,11 +237580,11 @@ Shifting token '+' (1.13: ) Entering state 20 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15: 1) + $1 = token number (1.15: 1) -> $$ = nterm exp (1.15: 1) Entering state 29 Reading a token @@ -240795,11 +237670,11 @@ Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.33: 1) + $1 = token number (1.33: 1) -> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token @@ -240807,11 +237682,11 @@ Shifting token '*' (1.35: ) Entering state 21 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.37: 2) + $1 = token number (1.37: 2) -> $$ = nterm exp (1.37: 2) Entering state 30 Reading a token @@ -240858,11 +237733,11 @@ Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) + $1 = token number (1.46: 1) -> $$ = nterm exp (1.46: 1) Entering state 27 Reading a token @@ -240888,12 +237763,26 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: cat stderr +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +636. regression.at:661: ok +input: stderr: + | (- *) + (1 2) = 1 Starting parse Entering state 0 Reading a token @@ -240923,11 +237812,11 @@ Shifting token '(' (1.6: ) Entering state 4 Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.7: 1) + $1 = token number (1.7: 1) -> $$ = nterm exp (1.7: 1) Entering state 12 Reading a token @@ -240935,11 +237824,11 @@ Shifting token '+' (1.9: ) Entering state 20 Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11: 1) + $1 = token number (1.11: 1) -> $$ = nterm exp (1.11: 1) Entering state 29 Reading a token @@ -240954,11 +237843,11 @@ Shifting token '+' (1.13: ) Entering state 20 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15: 1) + $1 = token number (1.15: 1) -> $$ = nterm exp (1.15: 1) Entering state 29 Reading a token @@ -241044,11 +237933,11 @@ Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.33: 1) + $1 = token number (1.33: 1) -> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token @@ -241056,11 +237945,11 @@ Shifting token '*' (1.35: ) Entering state 21 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.37: 2) + $1 = token number (1.37: 2) -> $$ = nterm exp (1.37: 2) Entering state 30 Reading a token @@ -241107,11 +237996,11 @@ Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) + $1 = token number (1.46: 1) -> $$ = nterm exp (1.46: 1) Entering state 27 Reading a token @@ -241137,11 +238026,133 @@ Entering state 6 Reading a token Now at end of input. 
+Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1494: $PREPARSER ./calc input +./calc.at:1491: cat stderr +input: +stderr: +635. regression.at:480: ./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 + ok +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
Shifting token "end of input" (2.1: ) Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: "$PERL" -pi -e 'use strict; +./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -241151,11 +238162,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1494: cat stderr -input: | (!!) + (1 2) = 1 -./calc.at:1494: $PREPARSER ./calc input +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: $PREPARSER ./calc input stderr: +./calc.at:1492: cat stderr + Starting parse Entering state 0 Reading a token @@ -241194,21 +238206,21 @@ Shifting token '(' (1.8: ) Entering state 4 Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) + $1 = token number (1.9: 1) -> $$ = nterm exp (1.9: 1) Entering state 12 Reading a token -Next token is token "number" (1.11: 2) +Next token is token number (1.11: 2) 1.11: syntax error, unexpected number Error: popping nterm exp (1.9: 1) Shifting token error (1.9-11: ) Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) Reading a token Next token is token ')' (1.12: ) Entering state 11 @@ -241233,11 +238245,11 @@ Shifting token '=' (1.14: ) Entering state 18 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) + $1 = token number (1.16: 1) -> $$ = nterm exp (1.16: 1) Entering state 27 Reading a token @@ -241263,14 +238275,134 @@ Entering state 6 Reading a token Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + | (!!) + (1 2) = 1 +./calc.at:1492: $PREPARSER ./calc input +./regression.at:1144: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o dancer dancer.c $LIBS +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. Shifting token "end of input" (2.1: ) Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: -617. existing.at:74: testing GNU AWK 3.1.0 Grammar: IELR(1) ... 
-./torture.at:548: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS Starting parse Entering state 0 Reading a token @@ -241309,21 +238441,21 @@ Shifting token '(' (1.8: ) Entering state 4 Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) + $1 = token number (1.9: 1) -> $$ = nterm exp (1.9: 1) Entering state 12 Reading a token -Next token is token "number" (1.11: 2) +Next token is token number (1.11: 2) 1.11: syntax error, unexpected number Error: popping nterm exp (1.9: 1) Shifting token error (1.9-11: ) Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) Reading a token Next token is token ')' (1.12: ) Entering state 11 @@ -241348,11 +238480,249 @@ Shifting token '=' (1.14: ) Entering state 18 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +./regression.at:1143: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o dancer dancer.c $LIBS +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +stderr: +./calc.at:1494: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.16: 1) -> $$ = nterm exp (1.16: 1) Entering state 27 Reading a token @@ -241378,11 +238748,275 @@ Entering state 6 Reading a token Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +input: +./regression.at:1145: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o dancer dancer.cc $LIBS + | (* *) + (*) + (*) + +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1494: $PREPARSER ./calc input +./calc.at:1491: cat stderr +stdout: +./torture.at:141: $PREPARSER ./input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
Shifting token "end of input" (2.1: ) Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +stderr: +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./torture.at:141: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -241394,10 +239028,14 @@ }eg ' expout || exit 77 ./calc.at:1494: cat stderr -input: | (- *) + (1 2) = 1 -./calc.at:1494: $PREPARSER ./calc input +./calc.at:1491: $PREPARSER ./calc input +604. torture.at:132: ok +./calc.at:1492: cat stderr +input: stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1494: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -241441,21 +239079,21 @@ Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) + $1 = token number (1.10: 1) -> $$ = nterm exp (1.10: 1) Entering state 12 Reading a token -Next token is token "number" (1.12: 2) +Next token is token number (1.12: 2) 1.12: syntax error, unexpected number Error: popping nterm exp (1.10: 1) Shifting token error (1.10-12: ) Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) Reading a token Next token is token ')' (1.13: ) Entering state 11 @@ -241480,11 +239118,11 @@ Shifting token '=' (1.15: ) Entering state 18 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) + $1 = token number (1.17: 1) -> $$ = nterm exp (1.17: 1) Entering state 27 Reading a token @@ -241510,10 +239148,79 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +input: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + | (- *) + (1 2) = 1 +./calc.at:1492: $PREPARSER ./calc input ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse @@ -241559,21 +239266,21 @@ Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) + $1 = token number (1.10: 1) -> $$ = nterm exp (1.10: 1) Entering state 12 Reading a token -Next token is token "number" (1.12: 2) +Next token is token number (1.12: 2) 1.12: syntax error, unexpected number Error: popping nterm exp (1.10: 1) Shifting token error (1.10-12: ) Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) Reading a token Next token is token ')' (1.13: ) Entering state 11 @@ -241598,11 +239305,11 @@ Shifting token '=' (1.15: ) Entering state 18 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) + $1 = token number (1.17: 1) -> $$ = nterm exp (1.17: 1) Entering state 27 Reading a token @@ -241628,11 +239335,385 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: "$PERL" -pi -e 'use strict; +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + +stderr: + | 1 + 2 * 3 + !- ++ +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1494: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +641. regression.at:1220: testing Expecting two tokens ... +./regression.at:1220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.c expect2.y +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -241642,10 +239723,217 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1494: cat stderr +./calc.at:1491: cat stderr +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +642. regression.at:1221: testing Expecting two tokens %glr-parser ... +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./regression.at:1221: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.c expect2.y +./calc.at:1492: cat stderr input: | (* *) + (*) + (*) -./calc.at:1494: $PREPARSER ./calc input +./calc.at:1491: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -241758,11 +240046,389 @@ Entering state 6 Reading a token Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1494: cat stderr + | (* *) + (*) + (*) +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./regression.at:917: cat tables.c +input: + | (#) + (#) = 2222 +./calc.at:1494: $PREPARSER ./calc input +./calc.at:1491: cat stderr +./regression.at:1220: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o expect2 expect2.c $LIBS +stderr: +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1491: $PREPARSER ./calc input +stderr: +stdout: +./regression.at:437: $PREPARSER ./input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. Shifting token "end of input" (2.1: ) Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +637. 
regression.at:812: ok +syntax error, unexpected a, expecting ∃¬∩∪∀ +./regression.at:437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) stderr: Starting parse Entering state 0 @@ -241875,10 +240541,200 @@ Entering state 6 Reading a token Now at end of input. 
+Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +stderr: +./regression.at:1221: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o expect2 expect2.c $LIBS +stderr: +input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother] +stdout: + | 1 + 2 * 3 + !- ++ +./calc.at:1491: $PREPARSER ./calc input +./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. Shifting token "end of input" (2.1: ) Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +632. regression.at:437: ok +643. regression.at:1222: testing Expecting two tokens lalr1.cc ... +./regression.at:1222: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.cc expect2.y + +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1492: cat stderr + ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -241891,17 +240747,17 @@ ' expout || exit 77 ./calc.at:1494: cat stderr input: - | 1 + 2 * 3 + !+ ++ + | (1 + #) = 1111 ./calc.at:1494: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token @@ -241909,11 +240765,11 @@ Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token @@ -241921,11 +240777,170 @@ Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: +stderr: +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 + !+ ++ +./calc.at:1492: $PREPARSER ./calc input +stderr: +./calc.at:1491: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token @@ -241959,16 +240974,23 @@ $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:438: $PREPARSER ./input +input: stderr: +stderr: + | (#) + (#) = 2222 +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token @@ -241976,11 +240998,11 @@ Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token @@ -241988,11 +241010,11 @@ Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token @@ -242026,18 +241048,100 @@ $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +syntax error, unexpected a, expecting "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" 
input: +stderr: +./regression.at:438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) | 1 + 2 * 3 + !- ++ -./calc.at:1494: $PREPARSER ./calc input +./calc.at:1492: $PREPARSER ./calc input +./regression.at:1222: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o expect2 expect2.cc $LIBS +stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token @@ -242045,11 +241149,11 @@ Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token @@ -242057,11 +241161,11 @@ Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token @@ -242095,16 +241199,128 @@ $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +633. 
regression.at:438: ok stderr: +stderr: +input-lalr.y: warning: 78 shift/reduce conflicts [-Wconflicts-sr] +input-lalr.y: warning: 10 reduce/reduce conflicts [-Wconflicts-rr] +input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token @@ -242112,11 +241328,11 @@ Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token @@ -242124,11 +241340,11 @@ Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token @@ -242162,8 +241378,123 @@ $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./torture.at:140: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1494: "$PERL" -pi -e 'use strict; +stdout: +644. regression.at:1230: testing Braced code in declaration in rules section ... 
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' +./regression.at:1261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +645. regression.at:1291: testing String alias declared after use ... +./regression.at:1304: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -242173,10 +241504,97 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input-lalr.y: warning: 78 shift/reduce conflicts [-Wconflicts-sr] +input-lalr.y: warning: 10 reduce/reduce conflicts [-Wconflicts-rr] +input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples + +stdout: +./existing.at:808: grep '^State.*conflicts:' input.output +./calc.at:1492: cat stderr +./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' ./calc.at:1494: cat stderr +./calc.at:1491: cat stderr +./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS input: - | (#) + (#) = 2222 + | (# + 1) = 1111 ./calc.at:1494: $PREPARSER ./calc input +input: + | (1 + #) = 1111 +input: +./calc.at:1491: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1492: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./existing.at:808: grep '^State.*conflicts:' input.output stderr: Starting parse Entering state 0 @@ -242242,11 +241660,11 @@ Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) + $1 = token number (1.13-16: 2222) -> $$ = nterm exp (1.13-16: 2222) Entering state 27 Reading a token @@ -242271,11 +241689,90 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -242318,50 +241815,219 @@ Next token is token error (1.8: ) Error: discarding token error (1.8: ) Reading a token -Next token is token ')' (1.9: ) +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1492: cat stderr +stderr: +stdout: +input: +stderr: +./regression.at:1143: $PREPARSER ./dancer + | (1 + #) = 1111 +./calc.at:1492: $PREPARSER ./calc input +./regression.at:1262: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -242370,24 +242036,11 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1494: $PREPARSER ./calc input +645. regression.at:1291: ok stderr: Starting parse Entering state 0 @@ -242396,11 +242049,11 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) + $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token @@ -242433,11 +242086,11 @@ Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) + $1 = token number (1.11-14: 1111) -> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token @@ -242462,11 +242115,13 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected ':' +./regression.at:1143: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -242475,11 +242130,11 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) + $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token @@ -242512,11 +242167,11 @@ Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) + $1 = token number (1.11-14: 1111) -> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token @@ -242541,10 +242196,23 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +638. regression.at:1143: ok + +./calc.at:1491: cat stderr ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -242555,11 +242223,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: cat stderr ./calc.at:1494: cat stderr -./torture.at:237: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + input: | (# + 1) = 1111 -./calc.at:1494: $PREPARSER ./calc input +./calc.at:1491: $PREPARSER ./calc input +646. regression.at:1314: testing Extra lookahead sets in report ... +./regression.at:1329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all input.y +input: stderr: Starting parse Entering state 0 @@ -242578,8 +242260,8 @@ Next token is token '+' (1.4: ) Error: discarding token '+' (1.4: ) Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) Reading a token Next token is token ')' (1.7: ) Entering state 11 @@ -242597,11 +242279,11 @@ Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) + $1 = token number (1.11-14: 1111) -> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token @@ -242626,12 +242308,18 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + # + 1) = 1111 +stderr: +./calc.at:1494: $PREPARSER ./calc input +./regression.at:1330: sed -n '/^State 1$/,/^State 2$/p' input.output stderr: Starting parse Entering state 0 @@ -242650,8 +242338,8 @@ Next token is token '+' (1.4: ) Error: discarding token '+' (1.4: ) Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) Reading a token Next token is token ')' (1.7: ) Entering state 11 @@ -242669,11 +242357,11 @@ Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) + $1 = token number (1.11-14: 1111) -> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token @@ -242698,24 +242386,15 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1494: $PREPARSER ./calc input +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother] +stdout: +646. 
regression.at:1314: ok stderr: Starting parse Entering state 0 @@ -242724,6 +242403,75 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token Next token is token "number" (1.2: 1) Shifting token "number" (1.2: 1) Entering state 1 @@ -242800,8 +242548,92 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +647. regression.at:1355: testing Token number in precedence declaration ... +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall -o input.c input.y stderr: +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -242885,6 +242717,18 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: cat stderr ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -242895,12 +242739,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +./calc.at:1491: cat stderr ./calc.at:1494: cat stderr input: | (1 + 1) / (1 - 1) ./calc.at:1494: $PREPARSER ./calc input +648. regression.at:1408: testing parse-gram.y: LALR = IELR ... +./regression.at:1414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -Dlr.type=lalr input.y +input: +input: +stderr: +stdout: +./regression.at:1220: $PREPARSER ./expect2 stderr: + | (1 + # + 1) = 1111 Starting parse Entering state 0 Reading a token @@ -243016,6 +242868,10 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1492: $PREPARSER ./calc input +stderr: + | (1 + # + 1) = 1111 +./calc.at:1491: $PREPARSER ./calc input ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse @@ -243025,6 +242881,93 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> 
$$ = nterm exp (1.15-18: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +syntax error, unexpected '+', expecting A or B +./regression.at:1220: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token Next token is token "number" (1.2: 1) Shifting token "number" (1.2: 1) Entering state 1 @@ -243133,6 +243076,274 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +641. regression.at:1220: ok +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 
+Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Werror +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stdout: ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -243143,214 +243354,841 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./existing.at:808: $PREPARSER ./input +./calc.at:1492: cat stderr +649. regression.at:1430: testing parse.error=verbose and YYSTACK_USE_ALLOCA ... 
+./regression.at:1481: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1491: cat stderr +./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./existing.at:1460: $PREPARSER ./input +input: +stderr: ./calc.at:1494: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1492: $PREPARSER ./calc input +stdout: +stderr: +syntax error, unexpected LEFT +./existing.at:808: $PREPARSER ./input +stderr: +650. regression.at:1504: testing parse.error=verbose overflow ... +620. existing.at:808: input.y:24.5-19: error: rule useless in parser due to conflicts [-Werror=other] +input.y:28.5-19: error: rule useless in parser due to conflicts [-Werror=other] +input.y:18.1-5: error: useless precedence and associativity for TK1 [-Werror=precedence] +./regression.at:1604: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + ok +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1388: sed 's,.*/$,,' stderr 1>&2 +stderr: +input: +./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +stderr: + | (1 + 1) / (1 - 1) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' 
(1.14: ) +Entering state 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: $PREPARSER ./calc input 569. calc.at:1494: ok +622. existing.at:1460: ok + +./regression.at:1482: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=error +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +619. 
existing.at:808: ok +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + +stderr: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Wnone,none -Werror --trace=none +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./regression.at:1611: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +652. regression.at:1739: testing LAC: Memory exhaustion ... +651. regression.at:1628: testing LAC: Exploratory stack ... +./regression.at:1713: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ + -Dparse.lac.es-capacity-initial=1 \ + -Dparse.lac.memory-trace=full -o input.c input.y +./regression.at:1771: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -o input.c input.y +653. regression.at:1874: testing Lex and parse params: yacc.c ... +./regression.at:1874: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +654. regression.at:1875: testing Lex and parse params: glr.c ... +./regression.at:1875: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./regression.at:1771: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1492: cat stderr +./regression.at:1874: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1491: cat stderr +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=none -Werror --trace=none +568. calc.at:1492: ok +./regression.at:1713: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +stdout: +./regression.at:1263: $PREPARSER ./input --debug +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reducing stack by rule 1 (line 20): +-> $$ = nterm start () +Entering state 1 +Stack now 0 1 +Reading a token +Next token is token 'a' (PRINTER) +syntax error, unexpected 'a', expecting end of file +Error: popping nterm start () +Stack now 0 +Cleanup: discarding lookahead token 'a' (PRINTER) +DESTRUCTOR +Stack now 0 +./regression.at:1263: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +566. calc.at:1491: ok + +./regression.at:1875: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +644. regression.at:1230: ok +./regression.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -Dlr.type=ielr input.y +./regression.at:1393: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -./torture.at:551: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 -618. existing.at:74: testing GNU AWK 3.1.0 Grammar: Canonical LR(1) ... 
-./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./existing.at:1460: $PREPARSER ./input +655. regression.at:1876: testing Lex and parse params: lalr1.cc ... +./regression.at:1876: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: +./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +656. regression.at:1877: testing Lex and parse params: glr.cc ... stderr: -input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] -input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] -input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] -input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] -input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] -input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] -input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] -input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] -input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] -./torture.at:551: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stdout: +./regression.at:1144: $PREPARSER ./dancer +./regression.at:1876: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +623. existing.at:1460: ok stderr: -./torture.at:553: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 +syntax error, unexpected ':' +./regression.at:1144: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +657. regression.at:1878: testing Lex and parse params: glr2.cc ... +./regression.at:1878: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y + stderr: -./existing.at:74: sed 's,.*/$,,' stderr 1>&2 -./torture.at:553: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./regression.at:1483: $PREPARSER ./input stderr: -./torture.at:555: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +syntax error, unexpected 'a', expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B +syntax error, unexpected end of file, expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B +639. regression.at:1144: ok +./regression.at:1483: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1877: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS + +649. regression.at:1430: ok +./regression.at:1878: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -memory exhausted -memory exhausted -./torture.at:555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +658. regression.at:1889: testing stdio.h is not needed ... +./regression.at:1906: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./regression.at:1613: $PREPARSER ./input stderr: +syntax error, unexpected 'a', expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B or 123456789112345678921234567893123456789412345678951234567896123C +syntax error, unexpected 'd' +syntax error memory exhausted -memory exhausted -615. torture.at:531: ok +./regression.at:1613: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./regression.at:1874: $PREPARSER ./input +stderr: +./regression.at:1874: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +650. regression.at:1504: ok +653. 
regression.at:1874: ok stderr: -input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] -input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] -input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] -input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] -input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] -input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] -input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] -input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] -input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./existing.at:74: sed 's,.*/$,,' stderr 1>&2 -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -619. existing.at:808: testing GNU Cim Grammar: LALR(1) ... -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +stdout: +./regression.at:1221: $PREPARSER ./expect2 + +stderr: +syntax error, unexpected '+', expecting A or B +./regression.at:1221: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +642. 
regression.at:1221: ok +./regression.at:1906: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + +659. push.at:25: testing Memory Leak for Early Deletion ... +./push.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +660. push.at:84: testing Multiple impure instances ... +./push.at:134: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +661. push.at:145: testing Unsupported Skeletons ... +./push.at:156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +662. push.at:167: testing Pstate reuse ... +./push.at:276: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./regression.at:1420: diff lalr.c ielr.c +./push.at:75: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +648. regression.at:1408: ok +./push.at:134: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: +663. c++.at:26: testing C++ Locations Unit Tests ... +======== Testing with C++ standard flags: '' +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stdout: -./torture.at:238: $PREPARSER ./input +./regression.at:1394: $PREPARSER ./input + stderr: -./torture.at:238: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -605. torture.at:216: ok +./regression.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +647. regression.at:1355: ok +661. push.at:145: ok + +./push.at:276: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -620. existing.at:808: testing GNU Cim Grammar: IELR(1) ... -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none stderr: stdout: -./torture.at:141: $PREPARSER ./input +658. regression.at:1889: ok + +664. c++.at:107: testing C++ Variant-based Symbols Unit Tests ... +./c++.at:234: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.yy stderr: -./torture.at:141: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -604. torture.at:132: ok +stdout: +665. c++.at:247: testing Multiple occurrences of $n and api.value.automove ... +./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.yy +666. c++.at:566: testing Variants lalr1.cc ... 
+./regression.at:1771: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +======== Testing with C++ standard flags: '' +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +667. c++.at:567: testing Variants lalr1.cc parse.assert ... +======== Testing with C++ standard flags: '' +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stdout: +./regression.at:1713: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt +./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy -Werror +stderr: +./regression.at:1713: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1713: grep 'syntax error,' stderr.txt +stderr: +./regression.at:1713: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt +stdout: +======== Testing with C++ standard flags: '' +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./push.at:76: $PREPARSER ./input +stderr: +./regression.at:1713: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt +./push.at:76: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1713: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt +659. push.at:25: ok +stderr: +stdout: -621. existing.at:808: testing GNU Cim Grammar: Canonical LR(1) ... -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +./push.at:134: $PREPARSER ./input +stderr: +./regression.at:1714: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ + -Dparse.lac.es-capacity-initial=1 \ + -Dparse.lac.memory-trace=full -o input.c input.y +./push.at:134: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./push.at:135: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +input.yy:16.33-34: error: multiple occurrences of $2 with api.value.automove [-Werror=other] + 16 | | "twice" exp { $$ = $2 + $2; } + | ^~ +input.yy:17.33-36: error: multiple occurrences of $2 with api.value.automove [-Werror=other] + 17 | | "thrice" exp[val] { $$ = $2 + $val + $2; } + | ^~~~ +input.yy:17.40-41: error: multiple occurrences of $2 with api.value.automove [-Werror=other] + 17 | | "thrice" exp[val] { $$ = $2 + $val + $2; } + | ^~ +./c++.at:263: sed 's,.*/$,,' stderr 1>&2 +./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS 
--leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy --warnings=error +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./regression.at:1714: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./push.at:135: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +668. c++.at:568: testing Variants lalr1.cc parse.assert api.value.automove ... +======== Testing with C++ standard flags: '' +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy -Wnone,none -Werror --trace=none +./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy --warnings=none -Werror --trace=none ./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -./existing.at:74: sed -n 's/^State //p' input.output | tail -1 -./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y stderr: -input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] 
-input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 78 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: error: 10 reduce/reduce conflicts [-Werror=conflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] -input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] -input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] -input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] -input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] -input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./existing.at:808: sed 's,.*/$,,' stderr 1>&2 -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -./existing.at:74: sed -n 's/^State //p' input.output | tail -1 -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +stdout: +./push.at:277: ./input +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +662. push.at:167: ok +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS + +665. 
c++.at:247: ok + stderr: -input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr] -input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples stdout: -./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./push.at:135: $PREPARSER ./input stderr: -input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 78 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: error: 10 reduce/reduce conflicts [-Werror=conflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] -input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] -input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] -input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] -input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] -input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] -./existing.at:808: sed 's,.*/$,,' stderr 1>&2 -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./push.at:135: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr] -input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples stdout: -./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./regression.at:1875: $PREPARSER ./input +stderr: +660. push.at:84: ok +./regression.at:1875: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +654. regression.at:1875: ok +669. c++.at:569: testing Variants lalr1.cc parse.assert %locations ... +======== Testing with C++ standard flags: '' +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y + + +stderr: +670. c++.at:570: testing Variants lalr1.cc parse.assert %code {\n#define TWO_STAGE_BUILD\n} ... +stdout: +./regression.at:1145: $PREPARSER ./dancer +======== Testing with C++ standard flags: '' +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +syntax error, unexpected ':' +./regression.at:1145: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +640. regression.at:1145: ok + +671. c++.at:571: testing Variants lalr1.cc parse.assert api.token.constructor ... +./torture.at:394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +======== Testing with C++ standard flags: '' +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +672. c++.at:572: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} ... +======== Testing with C++ standard flags: '' +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +673. c++.at:573: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations ... +======== Testing with C++ standard flags: '' +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +stdout: +./regression.at:1772: $PREPARSER ./input --debug +stderr: +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. 
+LAC: initial context established for "end of file" +LAC: checking lookahead "end of file": R2 G3 R2 G5 R2 G6 R2 G7 R2 G8 R2 G9 R2 G10 R2 G11 R2 (max size exceeded) +memory exhausted +Cleanup: discarding lookahead token "end of file" () +Stack now 0 +./regression.at:1772: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./regression.at:1787: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -o input.c input.y +./regression.at:1787: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./regression.at:1714: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt +stderr: +./regression.at:1714: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1714: grep 'syntax error,' stderr.txt +./regression.at:1714: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt +./regression.at:1714: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt +./regression.at:1714: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt +./regression.at:1715: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ + -Dparse.lac.es-capacity-initial=1 \ + -Dparse.lac.memory-trace=full -o input.c input.y +./regression.at:1715: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +stdout: +./regression.at:1222: $PREPARSER ./expect2 +stderr: +syntax error, unexpected '+', expecting A or B +./regression.at:1222: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +643. regression.at:1222: ok + +674. c++.at:574: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations api.value.automove ... +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: ./calc.at:1494: "$PERL" -ne ' @@ -243364,10 +244202,9 @@ || /\t/ )' calc.cc calc.hh -stderr: -stdout: +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS input: -./existing.at:74: $PREPARSER ./input +stderr: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -243382,11 +244219,11 @@ | 2^2^3 = 256 | (2^2)^3 = 64 ./calc.at:1494: $PREPARSER ./calc input +stdout: +./regression.at:1876: $PREPARSER ./input stderr: -syntax error, unexpected '*', expecting NEWLINE or '{' or ';' -./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1876: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none Starting parse Entering state 0 Reading a token @@ -244225,6 +245062,10 @@ Cleanup: popping nterm input (1.1-14.0: ) ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stdout: +stderr: +655. 
regression.at:1876: ./regression.at:1787: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + ok Starting parse Entering state 0 Reading a token @@ -245061,7 +245902,6 @@ Entering state 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -616. existing.at:74: ok input: | 1 2 ./calc.at:1494: $PREPARSER ./calc input @@ -245081,6 +245921,7 @@ 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token number (1.3: 2) + ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse @@ -245108,7 +245949,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - ./calc.at:1494: cat stderr input: | 1//2 @@ -245135,6 +245975,7 @@ Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '/' (1.3: ) ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:74: sed -n 's/^State //p' input.output | tail -1 stderr: Starting parse Entering state 0 @@ -245177,7 +246018,6 @@ Next token is token invalid token (1.1: ) 1.1: syntax error, unexpected invalid token Cleanup: discarding lookahead token invalid token (1.1: ) -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse @@ -245186,8 +246026,7 @@ Next token is token invalid token (1.1: ) 1.1: syntax error, unexpected invalid token Cleanup: discarding lookahead token invalid token (1.1: ) -622. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: LALR(1) ... -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -245202,6 +246041,7 @@ input: | 1 = 2 = 3 ./calc.at:1494: $PREPARSER ./calc input +675. c++.at:584: testing Variants and Typed Midrule Actions ... stderr: Starting parse Entering state 0 @@ -245263,6 +246103,8 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) +======== Testing with C++ standard flags: '' +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -245319,6 +246161,7 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -245329,21 +246172,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: ./calc.at:1494: cat stderr -stdout: -./existing.at:74: $PREPARSER ./input ./calc.at:1494: $PREPARSER ./calc /dev/null stderr: -./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Reading a token Now at end of input. 
1.1: syntax error, unexpected end of input Cleanup: discarding lookahead token end of input (1.1: ) -617. existing.at:74: ok ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse @@ -245362,12 +246199,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - ./calc.at:1494: cat stderr +stderr: +stdout: input: +./regression.at:1877: $PREPARSER ./input | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1494: $PREPARSER ./calc input stderr: +stderr: +./regression.at:1877: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +656. regression.at:1877: ok Starting parse Entering state 0 Reading a token @@ -245864,6 +246706,10 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + +stderr: +stdout: +./regression.at:1715: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -245874,10 +246720,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: ./calc.at:1494: cat stderr +./regression.at:1715: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1715: grep 'syntax error,' stderr.txt input: | (!!) + (1 2) = 1 ./calc.at:1494: $PREPARSER ./calc input +./regression.at:1715: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt +./regression.at:1715: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt stderr: Starting parse Entering state 0 @@ -245990,6 +246841,7 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./regression.at:1715: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse @@ -246103,6 +246955,9 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./regression.at:1716: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ + -Dparse.lac.es-capacity-initial=1 \ + -Dparse.lac.memory-trace=full -o input.c input.y ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -246362,12 +247217,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./regression.at:1716: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./calc.at:1494: cat stderr +676. c++.at:794: testing Doxygen Public Documentation ... +./c++.at:794: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy input: | (* *) + (*) + (*) ./calc.at:1494: $PREPARSER ./calc input -623. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: IELR(1) ... 
-./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y stderr: Starting parse Entering state 0 @@ -246616,6 +247472,7 @@ | 1 + 2 * 3 + !+ ++ ./calc.at:1494: $PREPARSER ./calc input stderr: +./c++.at:794: doxygen --version || exit 77 Starting parse Entering state 0 Reading a token @@ -246681,7 +247538,14 @@ $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +--- /dev/null 2025-04-29 17:46:35.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/stderr 2025-05-05 18:55:09.544300099 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/test-source: line 180: doxygen: command not found +stdout: +676. c++.at:794: skipped (c++.at:794) ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + stderr: Starting parse Entering state 0 @@ -247095,6 +247959,8 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +677. c++.at:795: testing Doxygen Private Documentation ... +./c++.at:795: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -247266,6 +248132,10 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stdout: +./c++.at:795: doxygen --version || exit 77 +./regression.at:1788: $PREPARSER ./input --debug ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -247276,10 +248146,33 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1494: cat stderr +--- /dev/null 2025-04-29 17:46:35.000000000 +0000 ++++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/stderr 2025-05-05 18:55:09.712300099 +0000 +@@ -0,0 +1 @@ ++/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/test-source: line 180: doxygen: command not found +stderr: +stdout: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" () +LAC: initial context established for "invalid token" +LAC: checking lookahead "invalid token": Always Err +Constructing syntax error message +LAC: checking lookahead "end of file": R2 G3 R2 G5 R2 G6 R2 G7 R2 G8 R2 G9 R2 G10 R2 G11 R2 (max size exceeded) +syntax error +memory exhausted +Cleanup: discarding lookahead token "invalid token" () +Stack now 0 +./regression.at:1788: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +677. c++.at:795: ./calc.at:1494: cat stderr + skipped (c++.at:795) +652. 
regression.at:1739: ok input: | (# + 1) = 1111 ./calc.at:1494: $PREPARSER ./calc input + stderr: Starting parse Entering state 0 @@ -247350,6 +248243,7 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse @@ -247431,8 +248325,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +stdout: +./c++.at:92: $PREPARSER ./input ./calc.at:1494: cat stderr +stderr: +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +======== Testing with C++ standard flags: '' +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y | (1 + # + 1) = 1111 ./calc.at:1494: $PREPARSER ./calc input stderr: @@ -247520,7 +248421,6 @@ Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none stderr: Starting parse Entering state 0 @@ -247605,7 +248505,6 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -247616,11 +248515,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +678. c++.at:848: testing Relative namespace references ... ./calc.at:1494: cat stderr +./c++.at:849: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy input: | (1 + 1) / (1 - 1) ./calc.at:1494: $PREPARSER ./calc input +679. c++.at:854: testing Absolute namespace references ... +./c++.at:855: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS Starting parse Entering state 0 Reading a token @@ -247853,7 +248757,6 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -247866,674 +248769,100 @@ ' expout || exit 77 ./calc.at:1494: cat stderr 570. calc.at:1494: ok +======== Testing with C++ standard flags: '' +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -624. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: Canonical LR(1) ... 
-./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -stderr: -input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 265 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] -input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] -input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] -input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] -input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] -input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] -input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] -input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] -input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] -./existing.at:74: sed 's,.*/$,,' stderr 1>&2 -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -stderr: -input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.13: error: empty rule without %empty [-Werror=empty-rule] -input.y:309.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:382.14: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other] -input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence] -input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence] 
-input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence] -input.y:176.1-5: error: useless associativity for OF, use %precedence [-Werror=precedence] -input.y:176.1-5: error: useless associativity for BETWEEN, use %precedence [-Werror=precedence] -input.y:177.1-5: error: useless associativity for AND, use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless 
associativity for CENTER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for START, use %precedence [-Werror=precedence] -input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence] -input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence] -input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence] -input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] -input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -./existing.at:808: sed -n 's/^State //p' input.output | tail -1 -./existing.at:808: sed -n 's/^State //p' input.output | tail -1 -./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y -./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -stderr: -input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.13: error: empty rule without %empty [-Werror=empty-rule] -input.y:309.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:382.14: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other] -input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence] 
-input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence] -input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence] -input.y:176.1-5: error: useless associativity for OF, use %precedence [-Werror=precedence] -input.y:176.1-5: error: useless associativity for BETWEEN, use %precedence [-Werror=precedence] -input.y:177.1-5: error: useless associativity for AND, use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for 
DOT_NE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for START, use %precedence [-Werror=precedence] -input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence] -input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence] -input.y:137.1-5: error: useless associativity for '.', use %precedence 
[-Werror=precedence] -input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence] -input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] -input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -stderr: -input-lalr.y: warning: 78 shift/reduce conflicts [-Wconflicts-sr] -input-lalr.y: warning: 10 reduce/reduce conflicts [-Wconflicts-rr] -input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -stdout: -./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -./existing.at:808: grep '^State.*conflicts:' input.output -./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -input-lalr.y: warning: 78 shift/reduce conflicts [-Wconflicts-sr] -input-lalr.y: warning: 10 reduce/reduce conflicts [-Wconflicts-rr] -input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -stdout: -./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -./existing.at:808: grep '^State.*conflicts:' input.output -./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -stderr: -stdout: -./existing.at:808: $PREPARSER ./input -stderr: -./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -620. existing.at:808: ok -stderr: -stdout: -./existing.at:808: $PREPARSER ./input -stderr: -./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -619. existing.at:808: ok - -625. regression.at:25: testing Trivial grammars ... -./regression.at:43: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -626. regression.at:55: testing YYSTYPE typedef ... -./regression.at:73: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./regression.at:44: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./regression.at:74: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -stderr: -stdout: -626. 
regression.at:55: ok -./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 -./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y - -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -627. regression.at:85: testing Early token definitions with --yacc ... -./regression.at:115: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --yacc -o input.c input.y -stderr: -stdout: -./regression.at:45: $CC $CFLAGS $CPPFLAGS -c -o input.o -DYYDEBUG -c input.c -./regression.at:116: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -stderr: -stdout: -627. regression.at:85: ok - -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -628. regression.at:127: testing Early token definitions without --yacc ... -./regression.at:161: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./regression.at:162: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -stderr: -input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother] -stdout: -./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 -stderr: -stdout: -625. regression.at:25: ok -./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y - -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y -./torture.at:394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -stdout: -628. regression.at:127: ok - -629. regression.at:173: testing Braces parsing ... -./regression.at:185: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y -./regression.at:187: grep 'tests = {{{{{{{{{{}}}}}}}}}};' input.c -stdout: - { tests = {{{{{{{{{{}}}}}}}}}}; } -629. regression.at:173: ok - -630. regression.at:196: testing Rule Line Numbers ... -./regression.at:232: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -v input.y -631. regression.at:345: testing Mixing %token styles ... -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -Wall -o input.c input.y -./regression.at:235: cat input.output -630. 
regression.at:196: ok - -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y -Werror -stderr: -stdout: -./existing.at:1460: $PREPARSER ./input -632. regression.at:437: testing Token definitions: parse.error=detailed ... -stderr: -./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y -syntax error, unexpected LEFT -./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input.y:3.1-5: error: useless precedence and associativity for "||" [-Werror=precedence] -input.y:3.1-5: error: useless precedence and associativity for "<=" [-Werror=precedence] -./regression.at:357: sed 's,.*/$,,' stderr 1>&2 -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=error -622. existing.at:1460: ok - -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y -Wnone,none -Werror --trace=none -./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror -633. regression.at:438: testing Token definitions: parse.error=verbose ... -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=none -Werror --trace=none -stderr: -input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother] -stdout: -./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -stderr: -input.y:26.8-14: error: symbol SPECIAL redeclared [-Werror=other] - 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~ -input.y:25.8-14: note: previous declaration - 25 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~ -input.y:26.16-63: error: symbol "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" used more than once as a literal string [-Werror=other] - 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -./regression.at:437: sed 's,.*/$,,' stderr 1>&2 -./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error -631. 
regression.at:345: ok - -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror -./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none -stderr: -input.y:26.8-14: error: symbol SPECIAL redeclared [-Werror=other] - 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~ -input.y:25.8-14: note: previous declaration - 25 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~ -input.y:26.16-63: error: symbol "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" used more than once as a literal string [-Werror=other] - 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -./regression.at:438: sed 's,.*/$,,' stderr 1>&2 -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error -634. regression.at:447: testing Characters Escapes ... -./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none -./regression.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./regression.at:466: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./regression.at:437: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none -stderr: -stdout: -634. regression.at:447: ok -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none - -./regression.at:438: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -stdout: -./existing.at:1460: $PREPARSER ./input -stderr: -./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -623. existing.at:1460: ok - -635. regression.at:480: testing Web2c Report ... -./regression.at:505: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v input.y -./regression.at:506: cat input.output -635. regression.at:480: ok - -636. 
regression.at:661: testing Web2c Actions ... -./regression.at:674: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y -stderr: -stdout: -./regression.at:437: $PREPARSER ./input -stderr: -syntax error, unexpected a, expecting ∃¬∩∪∀ -./regression.at:437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -632. regression.at:437: ok - -./regression.at:679: cat tables.c -637. regression.at:812: testing Useless Tokens ... -./regression.at:912: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -o input.c input.y -636. regression.at:661: ok - -stderr: -stdout: -./regression.at:438: $PREPARSER ./input -stderr: -syntax error, unexpected a, expecting "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" -./regression.at:438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -633. regression.at:438: ok - -638. regression.at:1143: testing Dancer ... -./regression.at:1143: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.c dancer.y -639. regression.at:1144: testing Dancer %glr-parser ... -./regression.at:1144: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.c dancer.y -640. regression.at:1145: testing Dancer lalr1.cc ... -./regression.at:1145: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.cc dancer.y -./regression.at:1143: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o dancer dancer.c $LIBS -./regression.at:1144: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o dancer dancer.c $LIBS -./regression.at:1145: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o dancer dancer.cc $LIBS -./regression.at:917: cat tables.c -637. regression.at:812: ok - -641. regression.at:1220: testing Expecting two tokens ... -./regression.at:1220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.c expect2.y -stderr: -stdout: -./regression.at:1143: $PREPARSER ./dancer -stderr: -syntax error, unexpected ':' -./regression.at:1143: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -638. regression.at:1143: ok - -./regression.at:1220: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o expect2 expect2.c $LIBS -642. regression.at:1221: testing Expecting two tokens %glr-parser ... -./regression.at:1221: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.c expect2.y -./regression.at:1221: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o expect2 expect2.c $LIBS -stderr: -stdout: -./regression.at:1220: $PREPARSER ./expect2 -stderr: -syntax error, unexpected '+', expecting A or B -./regression.at:1220: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -641. regression.at:1220: ok - -./existing.at:74: sed -n 's/^State //p' input.output | tail -1 -./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -643. regression.at:1222: testing Expecting two tokens lalr1.cc ... 
-./regression.at:1222: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.cc expect2.y -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -./regression.at:1222: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o expect2 expect2.cc $LIBS -stderr: -stdout: -./regression.at:1144: $PREPARSER ./dancer -stderr: -syntax error, unexpected ':' -./regression.at:1144: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -639. regression.at:1144: ok - -644. regression.at:1230: testing Braced code in declaration in rules section ... -./regression.at:1261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./regression.at:1262: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +======== Testing with C++ standard flags: '' +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +680. c++.at:863: testing Syntactically invalid namespace references ... +./c++.at:864: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./c++.at:865: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: stdout: ./existing.at:74: $PREPARSER ./input +./c++.at:868: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: ./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -618. existing.at:74: ok - -stderr: -stdout: -./regression.at:1221: $PREPARSER ./expect2 -645. regression.at:1291: testing String alias declared after use ... -./regression.at:1304: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -syntax error, unexpected '+', expecting A or B -./regression.at:1221: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -642. regression.at:1221: ok - -645. regression.at:1291: ok - -stderr: -stdout: -./regression.at:1263: $PREPARSER ./input --debug -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reducing stack by rule 1 (line 20): --> $$ = nterm start () -Entering state 1 -Stack now 0 1 -Reading a token -Next token is token 'a' (PRINTER) -syntax error, unexpected 'a', expecting end of file -Error: popping nterm start () -Stack now 0 -Cleanup: discarding lookahead token 'a' (PRINTER) -DESTRUCTOR -Stack now 0 -./regression.at:1263: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -644. regression.at:1230: ok - -646. regression.at:1314: testing Extra lookahead sets in report ... -./regression.at:1329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all input.y -647. regression.at:1355: testing Token number in precedence declaration ... -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall -o input.c input.y -./regression.at:1330: sed -n '/^State 1$/,/^State 2$/p' input.output -646. 
regression.at:1314: ok - -stderr: -stdout: -./regression.at:1145: $PREPARSER ./dancer -stderr: -syntax error, unexpected ':' -./regression.at:1145: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -648. regression.at:1408: testing parse-gram.y: LALR = IELR ... -./regression.at:1414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -Dlr.type=lalr input.y -640. regression.at:1145: ok -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Werror - -stderr: -input.y:24.5-19: error: rule useless in parser due to conflicts [-Werror=other] -input.y:28.5-19: error: rule useless in parser due to conflicts [-Werror=other] -input.y:18.1-5: error: useless precedence and associativity for TK1 [-Werror=precedence] -./regression.at:1388: sed 's,.*/$,,' stderr 1>&2 -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=error -649. regression.at:1430: testing parse.error=verbose and YYSTACK_USE_ALLOCA ... -./regression.at:1481: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -650. regression.at:1504: testing parse.error=verbose overflow ... -./regression.at:1604: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Wnone,none -Werror --trace=none -./regression.at:1482: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=none -Werror --trace=none -./regression.at:1611: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./regression.at:1393: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./regression.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -Dlr.type=ielr input.y -stderr: -stdout: -./regression.at:1613: $PREPARSER ./input -stderr: -syntax error, unexpected 'a', expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B or 123456789112345678921234567893123456789412345678951234567896123C -syntax error, unexpected 'd' -syntax error -memory exhausted -./regression.at:1613: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./regression.at:1483: $PREPARSER ./input -650. 
regression.at:1504: ok -stderr: -syntax error, unexpected 'a', expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B -syntax error, unexpected end of file, expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B -./regression.at:1483: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -649. regression.at:1430: ok - - stderr: stdout: -./regression.at:1394: $PREPARSER ./input -stderr: -./regression.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -647. regression.at:1355: ok -651. regression.at:1628: testing LAC: Exploratory stack ... -./regression.at:1713: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ - -Dparse.lac.es-capacity-initial=1 \ - -Dparse.lac.memory-trace=full -o input.c input.y -652. regression.at:1739: testing LAC: Memory exhaustion ... -./regression.at:1771: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -o input.c input.y - -./regression.at:1713: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./regression.at:1771: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./regression.at:1420: diff lalr.c ielr.c -653. regression.at:1874: testing Lex and parse params: yacc.c ... -./regression.at:1874: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -648. regression.at:1408: ok +./regression.at:1716: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt +618. existing.at:74: ok -./regression.at:1874: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -stdout: -./regression.at:1222: $PREPARSER ./expect2 +./c++.at:869: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: -syntax error, unexpected '+', expecting A or B -./regression.at:1222: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -643. regression.at:1222: ok +./regression.at:1716: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1716: grep 'syntax error,' stderr.txt +./regression.at:1716: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt +./regression.at:1716: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt +./regression.at:1716: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt +681. c++.at:884: testing Syntax error discarding no lookahead ... +======== Testing with C++ standard flags: '' +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:870: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./regression.at:1719: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.cc input.y +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +680. c++.at:863: ok -654. regression.at:1875: testing Lex and parse params: glr.c ... 
-./regression.at:1875: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./regression.at:1875: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -655. regression.at:1876: testing Lex and parse params: lalr1.cc ... -./regression.at:1876: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./regression.at:1876: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./regression.at:1719: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./regression.at:1874: $PREPARSER ./input -stderr: -./regression.at:1874: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -653. regression.at:1874: ok - +682. c++.at:1064: testing Syntax error as exception: lalr1.cc ... +./c++.at:1064: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS ./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -656. regression.at:1877: testing Lex and parse params: glr.cc ... -./regression.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./regression.at:1877: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./regression.at:1713: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt -stderr: -./regression.at:1713: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1713: grep 'syntax error,' stderr.txt -./regression.at:1713: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt stderr: stdout: -./regression.at:1771: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./regression.at:1713: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt -./regression.at:1713: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt -./regression.at:1714: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ - -Dparse.lac.es-capacity-initial=1 \ - -Dparse.lac.memory-trace=full -o input.c input.y -./regression.at:1714: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./c++.at:235: $PREPARSER ./list +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' stderr: stdout: -./regression.at:1875: $PREPARSER ./input +./regression.at:1878: $PREPARSER ./input +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -./regression.at:1875: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -654. regression.at:1875: ok +./regression.at:1878: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +657. regression.at:1878: ok +683. c++.at:1065: testing Syntax error as exception: glr.cc ... 
+./c++.at:1065: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: stdout: -./regression.at:1772: $PREPARSER ./input --debug -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -LAC: initial context established for "end of file" -LAC: checking lookahead "end of file": R2 G3 R2 G5 R2 G6 R2 G7 R2 G8 R2 G9 R2 G10 R2 G11 R2 (max size exceeded) -memory exhausted -Cleanup: discarding lookahead token "end of file" () -Stack now 0 -./regression.at:1772: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1787: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -o input.c input.y -./regression.at:1787: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -657. regression.at:1878: testing Lex and parse params: glr2.cc ... -./regression.at:1878: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: +./c++.at:566: $here/modern stdout: -./regression.at:1714: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt +Modern C++: 201703 +./c++.at:566: $PREPARSER ./list stderr: -./regression.at:1714: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1714: grep 'syntax error,' stderr.txt -./regression.at:1714: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt -./regression.at:1878: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./regression.at:1714: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt -./regression.at:1714: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt -./regression.at:1715: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ - -Dparse.lac.es-capacity-initial=1 \ - -Dparse.lac.memory-trace=full -o input.c input.y -./regression.at:1715: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +======== Testing with C++ standard flags: '' stderr: input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] @@ -248623,216 +248952,105 @@ input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS ./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -stderr: -stdout: -./regression.at:1876: $PREPARSER ./input -stderr: -./regression.at:1876: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -655. regression.at:1876: ok - -658. regression.at:1889: testing stdio.h is not needed ... -./regression.at:1906: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -stdout: -./regression.at:1787: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./regression.at:1906: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -stdout: -658. regression.at:1889: ok - -stderr: -stdout: -./regression.at:1715: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt -stderr: -./regression.at:1715: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1715: grep 'syntax error,' stderr.txt -./regression.at:1715: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt -./regression.at:1715: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt -./regression.at:1715: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt -./regression.at:1716: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ - -Dparse.lac.es-capacity-initial=1 \ - -Dparse.lac.memory-trace=full -o input.c input.y -659. push.at:25: testing Memory Leak for Early Deletion ... -./push.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./regression.at:1716: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./push.at:75: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -stdout: -./regression.at:1877: $PREPARSER ./input -stderr: -./regression.at:1877: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -656. regression.at:1877: ok - -stderr: -stdout: -./push.at:76: $PREPARSER ./input -stderr: -./push.at:76: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -659. push.at:25: ok - -660. push.at:84: testing Multiple impure instances ... -./push.at:134: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -stdout: -./torture.at:395: $PREPARSER ./input -stderr: -./torture.at:395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./regression.at:1788: $PREPARSER ./input --debug -613. 
torture.at:385: ok -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" () -LAC: initial context established for "invalid token" -LAC: checking lookahead "invalid token": Always Err -Constructing syntax error message -LAC: checking lookahead "end of file": R2 G3 R2 G5 R2 G6 R2 G7 R2 G8 R2 G9 R2 G10 R2 G11 R2 (max size exceeded) -syntax error -memory exhausted -Cleanup: discarding lookahead token "invalid token" () -Stack now 0 -./regression.at:1788: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./push.at:134: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - -652. regression.at:1739: ok - -661. push.at:145: testing Unsupported Skeletons ... -./push.at:156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -662. push.at:167: testing Pstate reuse ... -./push.at:276: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -661. push.at:145: ok -663. c++.at:26: testing C++ Locations Unit Tests ... -======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y - -./push.at:276: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -664. c++.at:107: testing C++ Variant-based Symbols Unit Tests ... -./c++.at:234: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.yy stderr: stdout: -./push.at:134: $PREPARSER ./input +./c++.at:849: $PREPARSER ./input stderr: -./push.at:134: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./push.at:135: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./push.at:135: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./regression.at:1716: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt -stderr: -./regression.at:1716: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1716: grep 'syntax error,' stderr.txt -./regression.at:1716: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt -./regression.at:1716: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt -./regression.at:1716: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt -./regression.at:1719: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.cc input.y -./regression.at:1719: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./push.at:135: $PREPARSER ./input -stderr: -./push.at:135: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 
-660. push.at:84: ok - +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./push.at:277: ./input -662. push.at:167: ok - -665. c++.at:247: testing Multiple occurrences of $n and api.value.automove ... -./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.yy -666. c++.at:566: testing Variants lalr1.cc ... -======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy -Werror -stderr: -input.yy:16.33-34: error: multiple occurrences of $2 with api.value.automove [-Werror=other] - 16 | | "twice" exp { $$ = $2 + $2; } - | ^~ -input.yy:17.33-36: error: multiple occurrences of $2 with api.value.automove [-Werror=other] - 17 | | "thrice" exp[val] { $$ = $2 + $val + $2; } - | ^~~~ -input.yy:17.40-41: error: multiple occurrences of $2 with api.value.automove [-Werror=other] - 17 | | "thrice" exp[val] { $$ = $2 + $val + $2; } - | ^~ -./c++.at:263: sed 's,.*/$,,' stderr 1>&2 -./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy --warnings=error -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy -Wnone,none -Werror --trace=none -./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy --warnings=none -Werror --trace=none -665. c++.at:247: ok - -667. c++.at:567: testing Variants lalr1.cc parse.assert ... -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: ./c++.at:92: $PREPARSER ./input stderr: +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS ./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +stdout: +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./regression.at:1878: $PREPARSER ./input +./c++.at:855: $PREPARSER ./input stderr: -./regression.at:1878: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -657. regression.at:1878: ok - -668. c++.at:568: testing Variants lalr1.cc parse.assert api.value.automove ... 
+./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -stdout: -./regression.at:1719: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt stderr: -./regression.at:1719: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1719: grep 'syntax error,' stderr.txt -./regression.at:1719: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt -./regression.at:1719: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt -./regression.at:1727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.java input.y -651. regression.at:1628: skipped (regression.at:1727) - -669. c++.at:569: testing Variants lalr1.cc parse.assert %locations ... -======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: stdout: -./c++.at:235: $PREPARSER ./list -stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stdout: +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stdout: -./c++.at:92: $PREPARSER ./input +./c++.at:659: $PREPARSER ./input stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token NUMBER (1) +Shifting token NUMBER (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 1 (line 34): + $1 = token NUMBER (1) +-> $$ = nterm expr (10) +destroy: 1 +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token NUMBER (30) +Reducing stack by rule 2 (line 35): +-> $$ = nterm @1 (20) +Entering state 4 +Stack now 0 2 4 +Next token is token NUMBER (30) +Shifting token NUMBER (30) +Entering state 5 +Stack now 0 2 4 5 +Reducing stack by rule 3 (line 35): + $1 = nterm expr (10) + $2 = nterm @1 (20) + $3 = token NUMBER (30) +expr: 10 20 30 +-> $$ = nterm expr (40) +destroy: 30 +destroy: 20 +destroy: 10 +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token EOI () +Shifting token EOI () +Entering state 3 +Stack now 0 2 3 +Stack now 0 2 3 +Cleanup: popping token EOI () +Cleanup: popping nterm expr (40) +destroy: 40 +./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:92: 
COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./c++.at:566: $here/modern +./c++.at:567: $here/modern stdout: Modern C++: 201703 -./c++.at:566: $PREPARSER ./list +./c++.at:567: $PREPARSER ./list +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: Destroy: "0" Destroy: "0" @@ -248855,19 +249073,30 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:567: $here/modern +./c++.at:941: $PREPARSER ./input +stderr: +syntax error +Discarding 'a'. +Reducing 'a'. +./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +stdout: +./c++.at:570: $here/modern +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stdout: Modern C++: 201703 -./c++.at:567: $PREPARSER ./list +./c++.at:570: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -248890,66 +249119,9 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:92: $PREPARSER ./input -stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] 
-input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 1876 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: error: 144 reduce/reduce conflicts [-Werror=conflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] -input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] -input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] -input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] -input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] -input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] -./existing.at:808: sed 's,.*/$,,' stderr 1>&2 -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:235: $PREPARSER ./list -stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: ./c++.at:568: $here/modern @@ -248979,183 +249151,14 @@ Destroy: () Destroy: (0, 1, 2, 4, 6) ./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:569: $here/modern +./c++.at:572: $here/modern stdout: Modern C++: 201703 -./c++.at:569: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:92: $PREPARSER ./input -stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:566: $here/modern -stdout: -Legac++ -./c++.at:566: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" 
-Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:567: $here/modern -stdout: -Legac++ -./c++.at:567: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:235: $PREPARSER ./list -stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:92: $PREPARSER ./input -stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:568: $here/modern -stderr: -stdout: -stdout: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -Legac++ -./c++.at:568: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: $PREPARSER ./list ======== Testing with C++ standard flags: '' ./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 -stderr: -stdout: -./c++.at:566: $here/modern -stdout: -Legac++ -./c++.at:566: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -249178,128 +249181,18 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no 
-fno-caret -o list.cc list.y +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./c++.at:569: $here/modern -stdout: -./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -Legac++ -./c++.at:569: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:92: $PREPARSER ./input -stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:567: $here/modern -stdout: -Legac++ -./c++.at:567: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:235: $PREPARSER ./list -stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -stderr: +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stdout: -./existing.at:1460: $PREPARSER ./input -stderr: -./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -624. existing.at:1460: ok - -670. c++.at:570: testing Variants lalr1.cc parse.assert %code {\n#define TWO_STAGE_BUILD\n} ... 
-======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +Modern C++: 201703 +./c++.at:569: $PREPARSER ./list ./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:92: $PREPARSER ./input -stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:568: $here/modern -stdout: -Legac++ -./c++.at:568: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -249322,15 +249215,11 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stdout: -./c++.at:569: $here/modern +./c++.at:571: $here/modern stdout: -Legac++ -./c++.at:569: $PREPARSER ./list +Modern C++: 201703 +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:571: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -249353,154 +249242,44 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ======== Testing with C++ standard flags: '' ./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:566: $here/modern -stdout: -Modern C++: 201103 -./c++.at:566: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc 
$LIBS stderr: stdout: -./c++.at:235: $PREPARSER ./list +./c++.at:849: $PREPARSER ./input stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:567: $here/modern -stdout: -Modern C++: 201103 -./c++.at:567: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:92: $PREPARSER ./input -stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -663. c++.at:26: ok - -671. c++.at:571: testing Variants lalr1.cc parse.assert api.token.constructor ... -======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./regression.at:1719: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt stderr: -stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./regression.at:1719: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1719: grep 'syntax error,' stderr.txt +./regression.at:1719: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt +./regression.at:1719: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt +./regression.at:1727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.java input.y stderr: stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: +./c++.at:573: $here/modern stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -stdout: -./c++.at:570: $here/modern -stdout: Modern C++: 201703 -./c++.at:570: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list 
list.cc $LIBS -stderr: -stdout: -./c++.at:235: $PREPARSER ./list -stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:566: $here/modern +./c++.at:573: $PREPARSER ./list stdout: -Modern C++: 201402 -./c++.at:566: $PREPARSER ./list stderr: +./c++.at:855: $PREPARSER ./input Destroy: "0" Destroy: "0" Destroy: 1 @@ -249522,232 +249301,55 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:568: $here/modern -stdout: -Modern C++: 201103 -./c++.at:568: $PREPARSER ./list stderr: -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:567: $here/modern -stderr: -stdout: -Modern C++: 201402 -./c++.at:567: $PREPARSER ./list -stdout: -./c++.at:569: $here/modern -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -stdout: -Modern C++: 201103 -./c++.at:569: $PREPARSER ./list -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +651. 
regression.at:1628: skipped (regression.at:1727) + +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +684. c++.at:1066: testing Syntax error as exception: glr2.cc ... +./c++.at:1066: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy ======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1066: ./check +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: -./c++.at:571: $here/modern -stdout: -Modern C++: 201703 -./c++.at:571: $PREPARSER ./list +./c++.at:92: $PREPARSER ./input stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./c++.at:570: $here/modern -stdout: -Legac++ -./c++.at:570: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1064: $PREPARSER ./input < in stderr: stdout: -./c++.at:235: $PREPARSER ./list -stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./torture.at:395: $PREPARSER ./input stderr: -stdout: 
-./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./torture.at:395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +613. torture.at:385: ok stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:574: $here/modern stderr: +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stdout: -./c++.at:566: $here/modern -stdout: +error: invalid expression +caught error +error: invalid character +caught error Modern C++: 201703 -./c++.at:566: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:568: $here/modern -stdout: -Modern C++: 201402 -./c++.at:568: $PREPARSER ./list +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: $PREPARSER ./list stderr: + Destroy: "" Destroy: "" Destroy: 1 @@ -249769,243 +249371,133 @@ Destroy: "" Destroy: () Destroy: (0, 1, 2, 4, 6) +./c++.at:1064: $PREPARSER ./input < in +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:567: $here/modern -stdout: -Modern C++: 201703 -./c++.at:567: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: invalid expression +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:569: $here/modern -stdout: -stderr: -Modern C++: 201402 -./c++.at:569: $PREPARSER ./list -stdout: +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:1064: $PREPARSER ./input < in stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" 
-Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +685. c++.at:1360: testing Exception safety with error recovery ... +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS stderr: stdout: ./c++.at:235: $PREPARSER ./list stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -664. c++.at:107: ok stderr: stdout: - -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -672. c++.at:572: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} ... +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:941: $PREPARSER ./input ======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:570: $here/modern -stdout: -Legac++ -./c++.at:570: $PREPARSER ./list +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +Discarding 'a'. +Reducing 'a'. 
+./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:571: $here/modern -stdout: -Legac++ -./c++.at:571: $PREPARSER ./list +./c++.at:849: $PREPARSER ./input stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:855: $PREPARSER ./input stderr: -stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:568: $here/modern -stdout: -Modern C++: 201703 -./c++.at:568: $PREPARSER ./list +./c++.at:659: $PREPARSER ./input stderr: -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token NUMBER (1) +Shifting token NUMBER (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 1 (line 34): + $1 = token NUMBER (1) +-> $$ = nterm expr (10) +destroy: 1 +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token NUMBER (30) +Reducing stack by rule 2 (line 35): +-> $$ = nterm @1 (20) +Entering state 4 +Stack now 0 2 4 +Next token is token NUMBER (30) +Shifting token NUMBER (30) +Entering state 5 +Stack now 0 2 4 5 +Reducing stack by rule 3 (line 35): + $1 = nterm expr (10) + $2 = nterm @1 (20) + $3 = token NUMBER (30) +expr: 10 20 30 +-> $$ = nterm expr (40) +destroy: 30 +destroy: 20 +destroy: 10 +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token EOI () +Shifting token EOI () +Entering state 3 +Stack now 0 2 3 +Stack now 0 2 
3 +Cleanup: popping token EOI () +Cleanup: popping nterm expr (40) +destroy: 40 +./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./existing.at:808: sed -n 's/^State //p' input.output | tail -1 -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1065: $PREPARSER ./input < in stderr: -stdout: -./c++.at:567: $here/modern -stdout: -Modern C++: 202002 -./c++.at:567: $PREPARSER ./list +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: ./c++.at:566: $here/modern stdout: -Modern C++: 202002 +Legac++ ./c++.at:566: $PREPARSER ./list stderr: Destroy: "0" @@ -250030,209 +249522,58 @@ Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) ./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:569: $here/modern -stdout: -Modern C++: 201703 -./c++.at:569: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard 
flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:570: $here/modern -stdout: -Modern C++: 201103 -./c++.at:570: $PREPARSER ./list -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:1360: ./exceptions || exit 77 stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Inner caught +Outer caught +./c++.at:1360: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy ======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:572: $here/modern -stdout: -Modern C++: 201703 -./c++.at:572: $PREPARSER ./list -stderr: +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) stdout: -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:571: $here/modern -stdout: -Legac++ -./c++.at:571: $PREPARSER ./list +./c++.at:941: $PREPARSER ./input stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) +syntax error +Discarding 'a'. +Reducing 'a'. 
+./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./existing.at:808: $PREPARSER ./input -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:849: $PREPARSER ./input stderr: -./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -621. existing.at:808: ok - -673. c++.at:573: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations ... +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:566: $here/modern +./c++.at:855: $PREPARSER ./input stdout: -Modern C++: 202100 -./c++.at:566: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -666. c++.at:566: ok - stderr: -stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -674. c++.at:574: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations api.value.automove ... +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:567: $here/modern stdout: -Modern C++: 202100 +Legac++ ./c++.at:567: $PREPARSER ./list stderr: Destroy: "0" @@ -250257,55 +249598,45 @@ Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) ./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -667. 
c++.at:567: ok - +======== Testing with C++ standard flags: '' +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:568: $here/modern -stdout: -Modern C++: 202002 -./c++.at:568: $PREPARSER ./list +./c++.at:92: $PREPARSER ./input stderr: -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -675. c++.at:584: testing Variants and Typed Midrule Actions ... -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +error: invalid expression +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +stderr: +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: -./c++.at:570: $here/modern +./c++.at:568: $here/modern stdout: -Modern C++: 201402 -./c++.at:570: $PREPARSER ./list +Legac++ +./c++.at:568: $PREPARSER ./list stderr: +stderr: +stdout: Destroy: "0" Destroy: "0" Destroy: 1 @@ -250327,19 +249658,19 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +======== Testing with C++ standard flags: '' +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:572: $here/modern 
+./c++.at:570: $here/modern stdout: Legac++ -./c++.at:572: $PREPARSER ./list +./c++.at:570: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -250362,24 +249693,30 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: ./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: ./c++.at:571: $here/modern +stderr: stdout: -Modern C++: 201103 +./c++.at:569: $here/modern +stdout: +stdout: +Legac++ ./c++.at:571: $PREPARSER ./list +Legac++ stderr: -stderr: +./c++.at:569: $PREPARSER ./list Destroy: "0" Destroy: "0" Destroy: 1 @@ -250401,15 +249738,10 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -stdout: -./c++.at:569: $here/modern ./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -Modern C++: 202002 -./c++.at:569: $PREPARSER ./list -stderr: ======== Testing with C++ standard flags: '' ./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: Destroy: "0" Destroy: "0" Destroy: 1 @@ -250432,16 +249764,12 @@ Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) ./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:573: $here/modern +./c++.at:572: $here/modern stdout: -Modern C++: 201703 -./c++.at:573: $PREPARSER ./list +Legac++ +./c++.at:572: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -250464,11 +249792,49 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +stdout: +./c++.at:235: $PREPARSER ./list +stderr: +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 
+======== Testing with C++ standard flags: '' +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:849: $PREPARSER ./input +stderr: +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +stderr: +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stdout: ./c++.at:659: $PREPARSER ./input stderr: Starting parse @@ -250518,58 +249884,12 @@ ./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:574: $here/modern -stdout: -Modern C++: 201703 -./c++.at:574: $PREPARSER ./list -stderr: -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:572: $here/modern +./c++.at:573: $here/modern stdout: Legac++ -./c++.at:572: $PREPARSER ./list +./c++.at:573: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -250592,19 +249912,41 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:573: COLUMNS=1000; export COLUMNS; 
NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:941: $PREPARSER ./input +stderr: +syntax error +Discarding 'a'. +Reducing 'a'. +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:570: $here/modern +./c++.at:855: $PREPARSER ./input +stderr: +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -Modern C++: 201703 -./c++.at:570: $PREPARSER ./list +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:574: $here/modern +stdout: +Legac++ +./c++.at:574: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -250627,123 +249969,29 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:568: $here/modern -stdout: -Modern C++: 202100 -./c++.at:568: $PREPARSER ./list +./c++.at:1360: $PREPARSER ./input aaaas stderr: -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -668. c++.at:568: ok - -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -676. c++.at:794: testing Doxygen Public Documentation ... -./c++.at:794: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:794: doxygen --version || exit 77 ---- /dev/null 2024-04-01 11:23:58.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/stderr 2024-04-02 12:26:52.212897278 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/test-source: line 180: doxygen: command not found -stdout: -676. c++.at:794: skipped (c++.at:794) - -677. c++.at:795: testing Doxygen Private Documentation ... 
-./c++.at:795: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:795: doxygen --version || exit 77 +exception caught: reduction +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaal stderr: -stdout: -./c++.at:659: $PREPARSER ./input +exception caught: yylex +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token NUMBER (1) -Shifting token NUMBER (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 1 (line 34): - $1 = token NUMBER (1) --> $$ = nterm expr (10) -destroy: 1 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token NUMBER (30) -Reducing stack by rule 2 (line 35): --> $$ = nterm @1 (20) -Entering state 4 -Stack now 0 2 4 -Next token is token NUMBER (30) -Shifting token NUMBER (30) -Entering state 5 -Stack now 0 2 4 5 -Reducing stack by rule 3 (line 35): - $1 = nterm expr (10) - $2 = nterm @1 (20) - $3 = token NUMBER (30) -expr: 10 20 30 --> $$ = nterm expr (40) -destroy: 30 -destroy: 20 -destroy: 10 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token EOI () -Shifting token EOI () -Entering state 3 -Stack now 0 2 3 -Stack now 0 2 3 -Cleanup: popping token EOI () -Cleanup: popping nterm expr (40) -destroy: 40 -./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ---- /dev/null 2024-04-01 11:23:58.000000000 +0000 -+++ /build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/stderr 2024-04-02 12:26:52.424891892 +0000 -@@ -0,0 +1 @@ -+/build/reproducible-path/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/test-source: line 180: doxygen: command not found +./c++.at:1360: $PREPARSER ./input i stdout: -677. c++.at:795: skipped (c++.at:795) -======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y - -678. c++.at:848: testing Relative namespace references ... 
-./c++.at:849: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: +exception caught: initial-action +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:566: $here/modern stdout: -./c++.at:571: $here/modern -stdout: -Modern C++: 201402 -./c++.at:571: $PREPARSER ./list +Legac++ +./c++.at:566: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -250766,93 +250014,284 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaap +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:1360: $PREPARSER ./input --debug aaaap +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x56d3ba00->Object::Object { } +Next token is token 'a' (0x56d3ba00 'a') +Shifting token 'a' (0x56d3ba00 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56d3ba00 'a') +-> $$ = nterm item (0x56d3ba00 'a') +Entering state 11 +Stack now 0 11 +Reading a token +0x56d3ba30->Object::Object { 0x56d3ba00 } +Next token is token 'a' (0x56d3ba30 'a') +Shifting token 'a' (0x56d3ba30 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56d3ba30 'a') +-> $$ = nterm item (0x56d3ba30 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x56d3ba60->Object::Object { 0x56d3ba00, 0x56d3ba30 } +Next token is token 'a' (0x56d3ba60 'a') +Shifting token 'a' (0x56d3ba60 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56d3ba60 'a') +-> $$ = nterm item (0x56d3ba60 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x56d3ba90->Object::Object { 0x56d3ba00, 0x56d3ba30, 0x56d3ba60 } +Next token is token 'a' (0x56d3ba90 'a') +Shifting token 'a' (0x56d3ba90 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56d3ba90 'a') +-> $$ = nterm item (0x56d3ba90 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x56d3bac0->Object::Object { 0x56d3ba00, 0x56d3ba30, 0x56d3ba60, 0x56d3ba90 } +Next token is token 'p' (0x56d3bac0 'p'Exception caught: cleaning lookahead and stack +0x56d3bac0->Object::~Object { 0x56d3ba00, 0x56d3ba30, 0x56d3ba60, 0x56d3ba90, 0x56d3bac0 } +0x56d3ba90->Object::~Object { 0x56d3ba00, 0x56d3ba30, 0x56d3ba60, 0x56d3ba90 } +0x56d3ba60->Object::~Object { 0x56d3ba00, 0x56d3ba30, 0x56d3ba60 } +0x56d3ba30->Object::~Object { 0x56d3ba00, 0x56d3ba30 } +0x56d3ba00->Object::~Object { 0x56d3ba00 } +exception caught: printer +end { } +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge 
mismatch for summaries/d' stderr stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x56d3ba00->Object::Object { } +Next token is token 'a' (0x56d3ba00 'a') +Shifting token 'a' (0x56d3ba00 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56d3ba00 'a') +-> $$ = nterm item (0x56d3ba00 'a') +Entering state 11 +Stack now 0 11 +Reading a token +0x56d3ba30->Object::Object { 0x56d3ba00 } +Next token is token 'a' (0x56d3ba30 'a') +Shifting token 'a' (0x56d3ba30 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56d3ba30 'a') +-> $$ = nterm item (0x56d3ba30 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x56d3ba60->Object::Object { 0x56d3ba00, 0x56d3ba30 } +Next token is token 'a' (0x56d3ba60 'a') +Shifting token 'a' (0x56d3ba60 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56d3ba60 'a') +-> $$ = nterm item (0x56d3ba60 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x56d3ba90->Object::Object { 0x56d3ba00, 0x56d3ba30, 0x56d3ba60 } +Next token is token 'a' (0x56d3ba90 'a') +Shifting token 'a' (0x56d3ba90 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56d3ba90 'a') +-> $$ = nterm item (0x56d3ba90 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x56d3bac0->Object::Object { 0x56d3ba00, 0x56d3ba30, 0x56d3ba60, 0x56d3ba90 } +Next token is token 'p' (0x56d3bac0 'p'Exception caught: cleaning lookahead and stack +0x56d3bac0->Object::~Object { 0x56d3ba00, 0x56d3ba30, 0x56d3ba60, 0x56d3ba90, 0x56d3bac0 } +0x56d3ba90->Object::~Object { 0x56d3ba00, 0x56d3ba30, 0x56d3ba60, 0x56d3ba90 } +0x56d3ba60->Object::~Object { 0x56d3ba00, 0x56d3ba30, 0x56d3ba60 } +0x56d3ba30->Object::~Object { 0x56d3ba00, 0x56d3ba30 } +0x56d3ba00->Object::~Object { 0x56d3ba00 } +exception caught: printer +end { } +./c++.at:1360: grep '^exception caught: printer$' stderr stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +exception caught: printer +./c++.at:1360: $PREPARSER ./input aaaae +stderr: +exception caught: syntax error +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:92: $PREPARSER ./input +stderr: +./c++.at:1360: $PREPARSER ./input aaaaT +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +stderr: +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaR +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: 
invalid character +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 1876 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: error: 144 reduce/reduce conflicts [-Werror=conflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] +input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] +input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] +input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] +input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] +input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] +./existing.at:808: sed 's,.*/$,,' stderr 1>&2 +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:1066: $PREPARSER ./input < in +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./c++.at:849: $PREPARSER ./input stderr: ./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: invalid character +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +stderr: +stdout: +./c++.at:941: $PREPARSER ./input +./c++.at:1066: $PREPARSER ./input < in +stderr: +stderr: +error: invalid expression +syntax error +Discarding 'a'. +Reducing 'a'. ======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:573: $here/modern +./c++.at:855: $PREPARSER ./input +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +======== Testing with C++ standard flags: '' +./c++.at:1066: $PREPARSER ./input < in +======== Testing with C++ standard flags: '' +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stdout: -Legac++ -./c++.at:573: $PREPARSER ./list stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: invalid character +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +error: invalid expression +caught error 
+error: invalid character +caught error stderr: stdout: -./c++.at:569: $here/modern +stderr: stdout: -Modern C++: 202100 -./c++.at:569: $PREPARSER ./list stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -669. c++.at:569: ok - -679. c++.at:854: testing Absolute namespace references ... -./c++.at:855: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stdout: +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: stderr: stdout: +stdout: +./c++.at:235: $PREPARSER ./list ./c++.at:659: $PREPARSER ./input +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:1065: $PREPARSER ./input < in +stderr: stderr: Starting parse Entering state 0 @@ -250898,26 +250337,51 @@ Cleanup: popping token EOI () Cleanup: popping nterm expr (40) destroy: 40 +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: invalid expression +======== Testing with C++ standard flags: '' +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS ======== Testing with C++ standard flags: '' ./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./c++.at:849: $PREPARSER ./input +./c++.at:1065: $PREPARSER ./input < in +./c++.at:92: $PREPARSER ./input stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +stderr: +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +======== Testing with C++ standard flags: '' +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:1066: ./check +stderr: +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +-std=c++98 not supported ======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:574: $here/modern +./c++.at:567: $here/modern +./c++.at:572: 
$CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stdout: +stderr: stdout: Legac++ -./c++.at:574: $PREPARSER ./list +./c++.at:567: $PREPARSER ./list +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: stderr: Destroy: "0" Destroy: "0" @@ -250940,40 +250404,16 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stderr: -stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: stdout: -./c++.at:855: $PREPARSER ./input -stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: +./c++.at:568: $here/modern +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./c++.at:849: $PREPARSER ./input -stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +Legac++ +./c++.at:568: $PREPARSER ./list stderr: -stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:572: $here/modern -stdout: -Modern C++: 201103 -./c++.at:572: $PREPARSER ./list -stderr: +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS Destroy: "0" Destroy: "0" Destroy: 1 @@ -250995,16 +250435,15 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ./c++.at:570: $here/modern +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -Modern C++: 202002 +Legac++ ./c++.at:570: $PREPARSER ./list +======== Testing with C++ standard flags: '' +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: Destroy: "0" Destroy: "0" @@ -251032,112 +250471,179 @@ ./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:855: $PREPARSER ./input -stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: ./check +-std=c++03 not supported ======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./c++.at:571: $here/modern -stdout: 
-Modern C++: 201703 -./c++.at:571: $PREPARSER ./list +./c++.at:1360: $PREPARSER ./input aaaas stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +exception caught: reduction +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaal +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +exception caught: yylex +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:659: $PREPARSER ./input -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1066: ./check +./c++.at:1360: $PREPARSER ./input i +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: +exception caught: initial-action +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaap +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input --debug aaaap stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token NUMBER (1) -Shifting token NUMBER (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 1 (line 34): - $1 = token NUMBER (1) --> $$ = nterm expr (10) -destroy: 1 +0x57dd6a00->Object::Object { } +Next token is token 'a' (0x57dd6a00 'a') +Shifting token 'a' (0x57dd6a00 'a') Entering state 2 Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57dd6a00 'a') +-> $$ = nterm item (0x57dd6a00 'a') +Entering state 11 +Stack now 0 11 Reading a token -Next token is token NUMBER (30) -Reducing stack by rule 2 (line 35): --> $$ = nterm @1 (20) -Entering state 4 -Stack now 0 2 4 -Next token is token NUMBER (30) -Shifting token NUMBER (30) -Entering state 5 -Stack now 0 2 4 5 -Reducing stack by rule 3 (line 35): - $1 = nterm expr (10) - $2 = nterm @1 (20) - $3 = token NUMBER (30) -expr: 10 20 30 --> $$ = nterm expr (40) -destroy: 30 -destroy: 20 -destroy: 10 +0x57dd6a30->Object::Object { 0x57dd6a00 } +Next token is token 'a' (0x57dd6a30 'a') +Shifting token 'a' (0x57dd6a30 'a') Entering state 2 -Stack now 0 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57dd6a30 'a') +-> $$ = nterm item (0x57dd6a30 'a') +Entering state 11 +Stack now 0 11 11 Reading a token -Next token is token EOI () -Shifting token EOI () -Entering state 3 -Stack now 0 2 3 -Stack now 0 2 3 -Cleanup: popping token EOI () -Cleanup: popping nterm expr (40) -destroy: 40 -./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export 
NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +0x57dd6a60->Object::Object { 0x57dd6a00, 0x57dd6a30 } +Next token is token 'a' (0x57dd6a60 'a') +Shifting token 'a' (0x57dd6a60 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57dd6a60 'a') +-> $$ = nterm item (0x57dd6a60 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x57dd6a90->Object::Object { 0x57dd6a00, 0x57dd6a30, 0x57dd6a60 } +Next token is token 'a' (0x57dd6a90 'a') +Shifting token 'a' (0x57dd6a90 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57dd6a90 'a') +-> $$ = nterm item (0x57dd6a90 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x57dd6ac0->Object::Object { 0x57dd6a00, 0x57dd6a30, 0x57dd6a60, 0x57dd6a90 } +Next token is token 'p' (0x57dd6ac0 'p'Exception caught: cleaning lookahead and stack +0x57dd6ac0->Object::~Object { 0x57dd6a00, 0x57dd6a30, 0x57dd6a60, 0x57dd6a90, 0x57dd6ac0 } +0x57dd6a90->Object::~Object { 0x57dd6a00, 0x57dd6a30, 0x57dd6a60, 0x57dd6a90 } +0x57dd6a60->Object::~Object { 0x57dd6a00, 0x57dd6a30, 0x57dd6a60 } +0x57dd6a30->Object::~Object { 0x57dd6a00, 0x57dd6a30 } +0x57dd6a00->Object::~Object { 0x57dd6a00 } +exception caught: printer +end { } +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x57dd6a00->Object::Object { } +Next token is token 'a' (0x57dd6a00 'a') +Shifting token 'a' (0x57dd6a00 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57dd6a00 'a') +-> $$ = nterm item (0x57dd6a00 'a') +Entering state 11 +Stack now 0 11 +Reading a token +0x57dd6a30->Object::Object { 0x57dd6a00 } +Next token is token 'a' (0x57dd6a30 'a') +Shifting token 'a' (0x57dd6a30 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57dd6a30 'a') +-> $$ = nterm item (0x57dd6a30 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x57dd6a60->Object::Object { 0x57dd6a00, 0x57dd6a30 } +Next token is token 'a' (0x57dd6a60 'a') +Shifting token 'a' (0x57dd6a60 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57dd6a60 'a') +-> $$ = nterm item (0x57dd6a60 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x57dd6a90->Object::Object { 0x57dd6a00, 0x57dd6a30, 0x57dd6a60 } +Next token is token 'a' (0x57dd6a90 'a') +Shifting token 'a' (0x57dd6a90 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57dd6a90 'a') +-> $$ = nterm item (0x57dd6a90 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x57dd6ac0->Object::Object { 0x57dd6a00, 0x57dd6a30, 0x57dd6a60, 0x57dd6a90 } +Next token is token 'p' (0x57dd6ac0 'p'Exception caught: cleaning lookahead and stack +0x57dd6ac0->Object::~Object { 0x57dd6a00, 0x57dd6a30, 0x57dd6a60, 0x57dd6a90, 0x57dd6ac0 } +0x57dd6a90->Object::~Object { 0x57dd6a00, 0x57dd6a30, 0x57dd6a60, 0x57dd6a90 } +0x57dd6a60->Object::~Object { 0x57dd6a00, 0x57dd6a30, 0x57dd6a60 } +0x57dd6a30->Object::~Object { 0x57dd6a00, 0x57dd6a30 } +0x57dd6a00->Object::~Object { 0x57dd6a00 } +exception caught: printer +end { } +./c++.at:1360: grep '^exception caught: printer$' stderr stdout: -./c++.at:849: $PREPARSER ./input +exception caught: printer +./c++.at:1360: $PREPARSER ./input aaaae stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge 
mismatch for summaries/d' stderr +exception caught: syntax error +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaT +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaR +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:573: $here/modern +./c++.at:571: $here/modern stdout: Legac++ -./c++.at:573: $PREPARSER ./list +stderr: +./c++.at:571: $PREPARSER ./list +stdout: stderr: Destroy: "0" Destroy: "0" @@ -251160,33 +250666,13 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:855: $PREPARSER ./input -stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:849: $PREPARSER ./input -stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:574: $here/modern +./c++.at:572: $here/modern +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: Legac++ -./c++.at:574: $PREPARSER ./list +======== Testing with C++ standard flags: '' +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:572: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -251209,78 +250695,15 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:855: $PREPARSER ./input -stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:659: $PREPARSER ./input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token NUMBER (1) -Shifting token NUMBER (1) 
-Entering state 1 -Stack now 0 1 -Reducing stack by rule 1 (line 34): - $1 = token NUMBER (1) --> $$ = nterm expr (10) -destroy: 1 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token NUMBER (30) -Reducing stack by rule 2 (line 35): --> $$ = nterm @1 (20) -Entering state 4 -Stack now 0 2 4 -Next token is token NUMBER (30) -Shifting token NUMBER (30) -Entering state 5 -Stack now 0 2 4 5 -Reducing stack by rule 3 (line 35): - $1 = nterm expr (10) - $2 = nterm @1 (20) - $3 = token NUMBER (30) -expr: 10 20 30 --> $$ = nterm expr (40) -destroy: 30 -destroy: 20 -destroy: 10 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token EOI () -Shifting token EOI () -Entering state 3 -Stack now 0 2 3 -Stack now 0 2 3 -Cleanup: popping token EOI () -Cleanup: popping nterm expr (40) -destroy: 40 -./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:572: $here/modern +./c++.at:569: $here/modern stdout: -Modern C++: 201402 -./c++.at:572: $PREPARSER ./list +Legac++ +./c++.at:569: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -251303,39 +250726,44 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:849: $PREPARSER ./input -stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:855: $PREPARSER ./input +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: +stdout: +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: +./c++.at:849: $PREPARSER ./input +stderr: +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS 
$LDFLAGS -o modern modern.cc $LIBS +./c++.at:941: $PREPARSER ./input stderr: -stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +syntax error +Discarding 'a'. +Reducing 'a'. +./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./c++.at:570: $here/modern +./c++.at:573: $here/modern stdout: -Modern C++: 202100 -./c++.at:570: $PREPARSER ./list +Legac++ +./c++.at:573: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -251358,20 +250786,16 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -670. c++.at:570: ok - -680. c++.at:863: testing Syntactically invalid namespace references ... -./c++.at:864: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:865: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:868: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:869: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:573: $here/modern +./c++.at:566: $here/modern stdout: Modern C++: 201103 -./c++.at:573: $PREPARSER ./list +./c++.at:566: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -251394,23 +250818,15 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:870: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -680. c++.at:863: ok - -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -681. c++.at:884: testing Syntax error discarding no lookahead ... 
-======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:571: $here/modern +./c++.at:574: $here/modern stdout: -Modern C++: 202002 -./c++.at:571: $PREPARSER ./list +Legac++ +./c++.at:574: $PREPARSER ./list +======== Testing with C++ standard flags: '' +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: Destroy: "0" Destroy: "0" @@ -251433,27 +250849,38 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:849: $PREPARSER ./input -stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: ./c++.at:855: $PREPARSER ./input stderr: ./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -stderr: ./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: ./c++.at:659: $PREPARSER ./input @@ -251505,13 +250932,245 @@ ./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +stdout: +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1065: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS ./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -./c++.at:574: $here/modern +./c++.at:92: $PREPARSER ./input +stderr: +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:235: $PREPARSER ./list +stderr: +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:1360: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaap +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input --debug aaaap +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x56958a00->Object::Object { } +Next token is token 'a' (0x56958a00 'a') +Shifting token 'a' (0x56958a00 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56958a00 'a') +-> $$ = nterm item (0x56958a00 'a') +Entering state 11 +Stack now 0 11 +Reading a token +0x56958a30->Object::Object { 0x56958a00 } +Next token is token 'a' (0x56958a30 'a') +Shifting token 'a' (0x56958a30 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56958a30 'a') +-> $$ = nterm item (0x56958a30 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x56958a60->Object::Object { 0x56958a00, 0x56958a30 } +Next token is token 'a' (0x56958a60 'a') +Shifting token 'a' (0x56958a60 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56958a60 'a') +-> $$ = nterm item (0x56958a60 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x56958a90->Object::Object { 0x56958a00, 0x56958a30, 0x56958a60 } +Next token is token 'a' (0x56958a90 'a') +Shifting token 'a' (0x56958a90 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56958a90 'a') +-> $$ = nterm item (0x56958a90 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x56958ac0->Object::Object { 0x56958a00, 0x56958a30, 0x56958a60, 0x56958a90 } +Next token is token 'p' (0x56958ac0 'p'Exception caught: cleaning lookahead and stack +0x56958ac0->Object::~Object { 0x56958a00, 0x56958a30, 0x56958a60, 
0x56958a90, 0x56958ac0 } +0x56958a90->Object::~Object { 0x56958a00, 0x56958a30, 0x56958a60, 0x56958a90 } +0x56958a60->Object::~Object { 0x56958a00, 0x56958a30, 0x56958a60 } +0x56958a30->Object::~Object { 0x56958a00, 0x56958a30 } +0x56958a00->Object::~Object { 0x56958a00 } +exception caught: printer +end { } +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x56958a00->Object::Object { } +Next token is token 'a' (0x56958a00 'a') +Shifting token 'a' (0x56958a00 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56958a00 'a') +-> $$ = nterm item (0x56958a00 'a') +Entering state 11 +Stack now 0 11 +Reading a token +0x56958a30->Object::Object { 0x56958a00 } +Next token is token 'a' (0x56958a30 'a') +Shifting token 'a' (0x56958a30 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56958a30 'a') +-> $$ = nterm item (0x56958a30 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x56958a60->Object::Object { 0x56958a00, 0x56958a30 } +Next token is token 'a' (0x56958a60 'a') +Shifting token 'a' (0x56958a60 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56958a60 'a') +-> $$ = nterm item (0x56958a60 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x56958a90->Object::Object { 0x56958a00, 0x56958a30, 0x56958a60 } +Next token is token 'a' (0x56958a90 'a') +Shifting token 'a' (0x56958a90 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x56958a90 'a') +-> $$ = nterm item (0x56958a90 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x56958ac0->Object::Object { 0x56958a00, 0x56958a30, 0x56958a60, 0x56958a90 } +Next token is token 'p' (0x56958ac0 'p'Exception caught: cleaning lookahead and stack +0x56958ac0->Object::~Object { 0x56958a00, 0x56958a30, 0x56958a60, 0x56958a90, 0x56958ac0 } +0x56958a90->Object::~Object { 0x56958a00, 0x56958a30, 0x56958a60, 0x56958a90 } +0x56958a60->Object::~Object { 0x56958a00, 0x56958a30, 0x56958a60 } +0x56958a30->Object::~Object { 0x56958a00, 0x56958a30 } +0x56958a00->Object::~Object { 0x56958a00 } +exception caught: printer +end { } +./c++.at:1360: grep '^exception caught: printer$' stderr +stdout: +exception caught: printer +./c++.at:1360: $PREPARSER ./input aaaae +stderr: +exception caught: syntax error +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:849: $PREPARSER ./input +./c++.at:1360: $PREPARSER ./input aaaaT +stderr: +stderr: +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:850: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./c++.at:1360: $PREPARSER ./input aaaaR +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS 
$LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:941: $PREPARSER ./input +stderr: +syntax error +Discarding 'a'. +Reducing 'a'. +./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +stdout: +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:855: $PREPARSER ./input +stderr: +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +======== Testing with C++ standard flags: '' +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:568: $here/modern stdout: Modern C++: 201103 -./c++.at:574: $PREPARSER ./list +./c++.at:568: $PREPARSER ./list stderr: Destroy: "" Destroy: "" @@ -251534,30 +251193,16 @@ Destroy: "" Destroy: () Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: +./c++.at:570: $here/modern stdout: -./c++.at:941: $PREPARSER ./input -stderr: -syntax error -Discarding 'a'. -Reducing 'a'. 
-./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +Modern C++: 201103 +./c++.at:570: $PREPARSER ./list stderr: -stdout: -./c++.at:572: $here/modern -stdout: -Modern C++: 201703 -./c++.at:572: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -251580,104 +251225,17 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -stderr: -./c++.at:855: $PREPARSER ./input -stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:849: $PREPARSER ./input -stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:850: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:941: $PREPARSER ./input -stderr: -syntax error -Discarding 'a'. -Reducing 'a'. 
-./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:850: $PREPARSER ./input -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: stdout: -./c++.at:659: $PREPARSER ./input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token NUMBER (1) -Shifting token NUMBER (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 1 (line 34): - $1 = token NUMBER (1) --> $$ = nterm expr (10) -destroy: 1 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token NUMBER (30) -Reducing stack by rule 2 (line 35): --> $$ = nterm @1 (20) -Entering state 4 -Stack now 0 2 4 -Next token is token NUMBER (30) -Shifting token NUMBER (30) -Entering state 5 -Stack now 0 2 4 5 -Reducing stack by rule 3 (line 35): - $1 = nterm expr (10) - $2 = nterm @1 (20) - $3 = token NUMBER (30) -expr: 10 20 30 --> $$ = nterm expr (40) -destroy: 30 -destroy: 20 -destroy: 10 -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token EOI () -Shifting token EOI () -Entering state 3 -Stack now 0 2 3 -Stack now 0 2 3 -Cleanup: popping token EOI () -Cleanup: popping nterm expr (40) -destroy: 40 -./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:573: $here/modern -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:567: $here/modern stdout: -Modern C++: 201402 -./c++.at:573: $PREPARSER ./list +Modern C++: 201103 +./c++.at:567: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -251700,47 +251258,17 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:855: $PREPARSER ./input -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:941: $PREPARSER ./input -stderr: -syntax error -Discarding 'a'. -Reducing 'a'. 
-./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:850: $PREPARSER ./input -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: ./c++.at:571: $here/modern stdout: -Modern C++: 202100 +Modern C++: 201103 ./c++.at:571: $PREPARSER ./list stderr: Destroy: "0" @@ -251765,67 +251293,51 @@ Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) ./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -671. c++.at:571: ok - -682. c++.at:1064: testing Syntax error as exception: lalr1.cc ... -./c++.at:1064: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -stderr: -stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:574: $here/modern +./c++.at:572: $here/modern stdout: -Modern C++: 201402 -./c++.at:574: $PREPARSER ./list +Modern C++: 201103 +./c++.at:572: $PREPARSER ./list stderr: -Destroy: "" -Destroy: "" +Destroy: "0" +Destroy: "0" Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) Destroy: "" Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:856: $PREPARSER ./input stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison 
--color=no -fno-caret -o list.cc list.y stdout: -./c++.at:850: $PREPARSER ./input -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:572: $here/modern +./c++.at:569: $here/modern stdout: -Modern C++: 202002 -./c++.at:572: $PREPARSER ./list +Modern C++: 201103 +./c++.at:569: $PREPARSER ./list +stderr: stderr: Destroy: "0" Destroy: "0" @@ -251848,24 +251360,51 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./existing.at:1460: $PREPARSER ./input +stderr: +./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +624. existing.at:1460: ok + stderr: stdout: -./c++.at:941: $PREPARSER ./input stderr: -syntax error -Discarding 'a'. -Reducing 'a'. -./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:850: $PREPARSER ./input +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: ======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./c++.at:1064: $PREPARSER ./input < in +stderr: +stdout: +686. c++.at:1361: testing Exception safety without error recovery ... +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS stdout: +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: ./c++.at:659: $PREPARSER ./input +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -251911,73 +251450,37 @@ Cleanup: popping token EOI () Cleanup: popping nterm expr (40) destroy: 40 -./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -675. c++.at:584: ok - -683. c++.at:1065: testing Syntax error as exception: glr.cc ... 
-./c++.at:1065: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -stdout: -./c++.at:856: $PREPARSER ./input -stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:850: $PREPARSER ./input -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:941: $PREPARSER ./input -stderr: -syntax error -Discarding 'a'. -Reducing 'a'. -./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS ======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:1064: $PREPARSER ./input < in +./c++.at:1065: $PREPARSER ./input < in stderr: error: invalid expression caught error error: invalid character caught error -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in stderr: error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in stderr: error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -stdout: -./c++.at:856: $PREPARSER ./input -stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: -./c++.at:573: $here/modern +./c++.at:566: $here/modern stdout: -Modern C++: 201703 -./c++.at:573: $PREPARSER ./list +Modern C++: 201402 +./c++.at:566: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -252000,124 +251503,51 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export 
COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:850: $PREPARSER ./input -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -stderr: -stderr: -error: invalid character -stdout: -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -stdout: -./c++.at:574: $here/modern +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -stdout: -Modern C++: 201703 -./c++.at:574: $PREPARSER ./list -./c++.at:941: $PREPARSER ./input -stderr: -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:92: $PREPARSER ./input stderr: -======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -syntax error -Discarding 'a'. -Reducing 'a'. -./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS ./c++.at:856: $PREPARSER ./input stderr: ./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +663. 
c++.at:26: ok +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: + stdout: -./c++.at:1064: $PREPARSER ./input < in +./c++.at:1066: $PREPARSER ./input < in stderr: error: invalid expression caught error error: invalid character caught error -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $PREPARSER ./input < in stderr: error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $PREPARSER ./input < in stderr: error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -stdout: -./c++.at:850: $PREPARSER ./input -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +687. c++.at:1362: testing Exception safety with error recovery api.value.type=variant ... +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS stderr: stdout: -./c++.at:572: $here/modern +./c++.at:573: $here/modern stdout: -Modern C++: 202100 -./c++.at:572: $PREPARSER ./list +Modern C++: 201103 +./c++.at:573: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -252140,100 +251570,31 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -672. c++.at:572: ok - -684. c++.at:1066: testing Syntax error as exception: glr2.cc ... 
-./c++.at:1066: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: ./c++.at:1066: ./check ./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: +./c++.at:574: $here/modern stdout: -./c++.at:856: $PREPARSER ./input +Modern C++: 201103 +./c++.at:574: $PREPARSER ./list stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:941: $PREPARSER ./input stderr: +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS syntax error Discarding 'a'. Reducing 'a'. 
./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1064: $PREPARSER ./input < in -stdout: -stderr: -./c++.at:850: $PREPARSER ./input -error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:856: $PREPARSER ./input stderr: stdout: -stderr: -./c++.at:574: $here/modern -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stdout: -Modern C++: 202002 -./c++.at:574: $PREPARSER ./list -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: Destroy: "" Destroy: "" Destroy: 1 @@ -252255,206 +251616,41 @@ Destroy: "" Destroy: () Destroy: (0, 1, 2, 4, 6) +./c++.at:235: $PREPARSER ./list ./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -stderr: -stderr: -stdout: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:941: $PREPARSER ./input -stderr: -syntax error -Discarding 'a'. -Reducing 'a'. -./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS 681. c++.at:884: ok -685. c++.at:1360: testing Exception safety with error recovery ... 
-./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS -stderr: -stdout: -./c++.at:573: $here/modern -stdout: -Modern C++: 202002 -./c++.at:573: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none stdout: -./c++.at:850: $PREPARSER ./input stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:851: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -======== Testing with C++ standard flags: '' -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1360: ./exceptions || exit 77 +./c++.at:1361: ./exceptions || exit 77 stderr: Inner caught Outer caught -./c++.at:1360: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy -stderr: -stdout: -./c++.at:856: $PREPARSER ./input -stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -stdout: -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS 
$CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -stdout: -stderr: -./c++.at:1066: $PREPARSER ./input < in -stdout: -./c++.at:856: $PREPARSER ./input -stderr: -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in -./c++.at:857: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -stderr: -error: invalid expression -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./c++.at:851: $PREPARSER ./input -stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1066: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./c++.at:1066: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./c++.at:1066: ./check -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -stdout: -./c++.at:851: $PREPARSER ./input -stderr: +./c++.at:1361: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy stdout: -stderr: ./c++.at:1360: $PREPARSER ./input aaaas -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' stderr: exception caught: reduction -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input aaaal stderr: exception caught: yylex ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +688. c++.at:1363: testing Exception safety without error recovery api.value.type=variant ... 
./c++.at:1360: $PREPARSER ./input i +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS stderr: exception caught: initial-action ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS ./c++.at:1360: $PREPARSER ./input aaaap stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -252464,57 +251660,57 @@ Entering state 0 Stack now 0 Reading a token -0x57f8ea00->Object::Object { } -Next token is token 'a' (0x57f8ea00 'a') -Shifting token 'a' (0x57f8ea00 'a') +0x56b7aa00->Object::Object { } +Next token is token 'a' (0x56b7aa00 'a') +Shifting token 'a' (0x56b7aa00 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57f8ea00 'a') --> $$ = nterm item (0x57f8ea00 'a') + $1 = token 'a' (0x56b7aa00 'a') +-> $$ = nterm item (0x56b7aa00 'a') Entering state 11 Stack now 0 11 Reading a token -0x57f8ea30->Object::Object { 0x57f8ea00 } -Next token is token 'a' (0x57f8ea30 'a') -Shifting token 'a' (0x57f8ea30 'a') +0x56b7aa30->Object::Object { 0x56b7aa00 } +Next token is token 'a' (0x56b7aa30 'a') +Shifting token 'a' (0x56b7aa30 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57f8ea30 'a') --> $$ = nterm item (0x57f8ea30 'a') + $1 = token 'a' (0x56b7aa30 'a') +-> $$ = nterm item (0x56b7aa30 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x57f8ea60->Object::Object { 0x57f8ea00, 0x57f8ea30 } -Next token is token 'a' (0x57f8ea60 'a') -Shifting token 'a' (0x57f8ea60 'a') +0x56b7aa60->Object::Object { 0x56b7aa00, 0x56b7aa30 } +Next token is token 'a' (0x56b7aa60 'a') +Shifting token 'a' (0x56b7aa60 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57f8ea60 'a') --> $$ = nterm item (0x57f8ea60 'a') + $1 = token 'a' (0x56b7aa60 'a') +-> $$ = nterm item (0x56b7aa60 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x57f8ea90->Object::Object { 0x57f8ea00, 0x57f8ea30, 0x57f8ea60 } -Next token is token 'a' (0x57f8ea90 'a') -Shifting token 'a' (0x57f8ea90 'a') +0x56b7aa90->Object::Object { 0x56b7aa00, 0x56b7aa30, 0x56b7aa60 } +Next token is token 'a' (0x56b7aa90 'a') +Shifting token 'a' (0x56b7aa90 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57f8ea90 'a') --> $$ = nterm item (0x57f8ea90 'a') + $1 = token 'a' (0x56b7aa90 'a') +-> $$ = nterm item (0x56b7aa90 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x57f8eac0->Object::Object { 0x57f8ea00, 0x57f8ea30, 0x57f8ea60, 0x57f8ea90 } -Next token is token 'p' (0x57f8eac0 'p'Exception caught: cleaning lookahead and stack -0x57f8eac0->Object::~Object { 0x57f8ea00, 0x57f8ea30, 0x57f8ea60, 0x57f8ea90, 0x57f8eac0 } -0x57f8ea90->Object::~Object { 0x57f8ea00, 0x57f8ea30, 0x57f8ea60, 0x57f8ea90 } -0x57f8ea60->Object::~Object { 0x57f8ea00, 0x57f8ea30, 0x57f8ea60 } -0x57f8ea30->Object::~Object { 0x57f8ea00, 0x57f8ea30 } -0x57f8ea00->Object::~Object { 0x57f8ea00 } +0x56b7aac0->Object::Object { 0x56b7aa00, 0x56b7aa30, 0x56b7aa60, 0x56b7aa90 } +Next token is token 'p' (0x56b7aac0 'p'Exception caught: cleaning lookahead and stack +0x56b7aac0->Object::~Object { 0x56b7aa00, 0x56b7aa30, 0x56b7aa60, 0x56b7aa90, 0x56b7aac0 } +0x56b7aa90->Object::~Object { 0x56b7aa00, 0x56b7aa30, 0x56b7aa60, 0x56b7aa90 } +0x56b7aa60->Object::~Object { 0x56b7aa00, 0x56b7aa30, 0x56b7aa60 } +0x56b7aa30->Object::~Object 
{ 0x56b7aa00, 0x56b7aa30 } +0x56b7aa00->Object::~Object { 0x56b7aa00 } exception caught: printer end { } ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -252523,70 +251719,69 @@ Entering state 0 Stack now 0 Reading a token -0x57f8ea00->Object::Object { } -Next token is token 'a' (0x57f8ea00 'a') -Shifting token 'a' (0x57f8ea00 'a') +0x56b7aa00->Object::Object { } +Next token is token 'a' (0x56b7aa00 'a') +Shifting token 'a' (0x56b7aa00 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57f8ea00 'a') --> $$ = nterm item (0x57f8ea00 'a') + $1 = token 'a' (0x56b7aa00 'a') +-> $$ = nterm item (0x56b7aa00 'a') Entering state 11 Stack now 0 11 Reading a token -0x57f8ea30->Object::Object { 0x57f8ea00 } -Next token is token 'a' (0x57f8ea30 'a') -Shifting token 'a' (0x57f8ea30 'a') +0x56b7aa30->Object::Object { 0x56b7aa00 } +Next token is token 'a' (0x56b7aa30 'a') +Shifting token 'a' (0x56b7aa30 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57f8ea30 'a') --> $$ = nterm item (0x57f8ea30 'a') + $1 = token 'a' (0x56b7aa30 'a') +-> $$ = nterm item (0x56b7aa30 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x57f8ea60->Object::Object { 0x57f8ea00, 0x57f8ea30 } -Next token is token 'a' (0x57f8ea60 'a') -Shifting token 'a' (0x57f8ea60 'a') +0x56b7aa60->Object::Object { 0x56b7aa00, 0x56b7aa30 } +Next token is token 'a' (0x56b7aa60 'a') +Shifting token 'a' (0x56b7aa60 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57f8ea60 'a') --> $$ = nterm item (0x57f8ea60 'a') + $1 = token 'a' (0x56b7aa60 'a') +-> $$ = nterm item (0x56b7aa60 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x57f8ea90->Object::Object { 0x57f8ea00, 0x57f8ea30, 0x57f8ea60 } -Next token is token 'a' (0x57f8ea90 'a') -Shifting token 'a' (0x57f8ea90 'a') +0x56b7aa90->Object::Object { 0x56b7aa00, 0x56b7aa30, 0x56b7aa60 } +Next token is token 'a' (0x56b7aa90 'a') +Shifting token 'a' (0x56b7aa90 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57f8ea90 'a') --> $$ = nterm item (0x57f8ea90 'a') + $1 = token 'a' (0x56b7aa90 'a') +-> $$ = nterm item (0x56b7aa90 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x57f8eac0->Object::Object { 0x57f8ea00, 0x57f8ea30, 0x57f8ea60, 0x57f8ea90 } -Next token is token 'p' (0x57f8eac0 'p'Exception caught: cleaning lookahead and stack -0x57f8eac0->Object::~Object { 0x57f8ea00, 0x57f8ea30, 0x57f8ea60, 0x57f8ea90, 0x57f8eac0 } -0x57f8ea90->Object::~Object { 0x57f8ea00, 0x57f8ea30, 0x57f8ea60, 0x57f8ea90 } -0x57f8ea60->Object::~Object { 0x57f8ea00, 0x57f8ea30, 0x57f8ea60 } -0x57f8ea30->Object::~Object { 0x57f8ea00, 0x57f8ea30 } -0x57f8ea00->Object::~Object { 0x57f8ea00 } +0x56b7aac0->Object::Object { 0x56b7aa00, 0x56b7aa30, 0x56b7aa60, 0x56b7aa90 } +Next token is token 'p' (0x56b7aac0 'p'Exception caught: cleaning lookahead and stack +0x56b7aac0->Object::~Object { 0x56b7aa00, 0x56b7aa30, 0x56b7aa60, 0x56b7aa90, 0x56b7aac0 } +0x56b7aa90->Object::~Object { 0x56b7aa00, 0x56b7aa30, 0x56b7aa60, 0x56b7aa90 } +0x56b7aa60->Object::~Object { 0x56b7aa00, 0x56b7aa30, 0x56b7aa60 } +0x56b7aa30->Object::~Object { 0x56b7aa00, 0x56b7aa30 } +0x56b7aa00->Object::~Object { 0x56b7aa00 } exception caught: printer end { } +======== Testing with C++ standard flags: '' ./c++.at:1360: grep '^exception caught: printer$' stderr +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS 
$LDFLAGS -o input input.cc $LIBS stdout: exception caught: printer ./c++.at:1360: $PREPARSER ./input aaaae stderr: exception caught: syntax error ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: ./c++.at:1360: $PREPARSER ./input aaaaE -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -252600,26 +251795,52 @@ ./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:1065: $PREPARSER ./input < in +./c++.at:850: $PREPARSER ./input stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in +stdout: +./c++.at:1362: ./exceptions || exit 77 stderr: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Inner caught +Outer caught +./c++.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy ======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:856: $PREPARSER ./input +stderr: +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:1363: ./exceptions || exit 77 +stderr: +Inner caught +Outer caught +./c++.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy +======== Testing with C++ standard flags: '' +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stderr: +stdout: +stdout: +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:850: $PREPARSER ./input +stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:1064: $PREPARSER ./input < in @@ -252631,16 +251852,15 @@ ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1064: $PREPARSER ./input < in stderr: +stdout: +stderr: stderr: stdout: -./c++.at:857: $PREPARSER ./input +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS error: invalid expression ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: 
-./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ./c++.at:1064: $PREPARSER ./input < in -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: error: invalid character ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -252648,10 +251868,14 @@ ./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: -./c++.at:573: $here/modern +./c++.at:570: $here/modern +stdout: +stderr: +Modern C++: 201402 +./c++.at:570: $PREPARSER ./list +stderr: +stdout: stdout: -Modern C++: 202100 -./c++.at:573: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -252674,24 +251898,47 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -673. c++.at:573: ok - +./c++.at:567: $here/modern +./c++.at:856: $PREPARSER ./input stderr: +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./c++.at:851: $PREPARSER ./input -stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -686. c++.at:1361: testing Exception safety without error recovery ... -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS -stderr: stdout: -./c++.at:574: $here/modern +stderr: +Modern C++: 201402 +./c++.at:568: $here/modern +./c++.at:567: $PREPARSER ./list +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -Modern C++: 202100 -./c++.at:574: $PREPARSER ./list +stderr: +Modern C++: 201402 +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +======== Testing with C++ standard flags: '' +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +======== Testing with C++ standard flags: '' +./c++.at:568: $PREPARSER ./list +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Destroy: "" Destroy: "" @@ -252714,197 +251961,72 @@ Destroy: "" Destroy: () Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -674. c++.at:574: ok - -687. c++.at:1362: testing Exception safety with error recovery api.value.type=variant ... 
-./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:1361: ./exceptions || exit 77 -stderr: -Inner caught -Outer caught -./c++.at:1361: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy ======== Testing with C++ standard flags: '' -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:857: $PREPARSER ./input -stderr: -stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:659: $PREPARSER ./input ======== Testing with C++ standard flags: '' -./c++.at:1360: $PREPARSER ./input aaaas -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -exception caught: reduction -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaap -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input --debug aaaap +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x56a90a00->Object::Object { } -Next token is token 'a' (0x56a90a00 'a') -Shifting token 'a' (0x56a90a00 'a') +Next token is token NUMBER (1) +Shifting token NUMBER (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 1 (line 34): + $1 = token NUMBER (1) +-> $$ = nterm expr (10) +destroy: 1 Entering state 2 Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56a90a00 'a') --> $$ = nterm item (0x56a90a00 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x56a90a30->Object::Object { 0x56a90a00 } -Next token is token 'a' (0x56a90a30 'a') -Shifting token 'a' (0x56a90a30 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56a90a30 'a') --> $$ = nterm item (0x56a90a30 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x56a90a60->Object::Object { 0x56a90a00, 0x56a90a30 } -Next token is token 'a' (0x56a90a60 'a') -Shifting token 'a' (0x56a90a60 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56a90a60 'a') --> $$ = nterm item (0x56a90a60 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x56a90a90->Object::Object { 0x56a90a00, 0x56a90a30, 0x56a90a60 } -Next token is token 'a' (0x56a90a90 'a') -Shifting token 'a' (0x56a90a90 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56a90a90 'a') --> $$ = nterm item (0x56a90a90 'a') -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x56a90ac0->Object::Object { 0x56a90a00, 0x56a90a30, 0x56a90a60, 0x56a90a90 } -Next token is token 'p' (0x56a90ac0 'p'Exception caught: cleaning lookahead and stack 
-0x56a90ac0->Object::~Object { 0x56a90a00, 0x56a90a30, 0x56a90a60, 0x56a90a90, 0x56a90ac0 } -0x56a90a90->Object::~Object { 0x56a90a00, 0x56a90a30, 0x56a90a60, 0x56a90a90 } -0x56a90a60->Object::~Object { 0x56a90a00, 0x56a90a30, 0x56a90a60 } -0x56a90a30->Object::~Object { 0x56a90a00, 0x56a90a30 } -0x56a90a00->Object::~Object { 0x56a90a00 } -exception caught: printer -end { } -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 Reading a token -0x56a90a00->Object::Object { } -Next token is token 'a' (0x56a90a00 'a') -Shifting token 'a' (0x56a90a00 'a') +Next token is token NUMBER (30) +Reducing stack by rule 2 (line 35): +-> $$ = nterm @1 (20) +Entering state 4 +Stack now 0 2 4 +Next token is token NUMBER (30) +Shifting token NUMBER (30) +Entering state 5 +Stack now 0 2 4 5 +Reducing stack by rule 3 (line 35): + $1 = nterm expr (10) + $2 = nterm @1 (20) + $3 = token NUMBER (30) +expr: 10 20 30 +-> $$ = nterm expr (40) +destroy: 30 +destroy: 20 +destroy: 10 Entering state 2 Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56a90a00 'a') --> $$ = nterm item (0x56a90a00 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x56a90a30->Object::Object { 0x56a90a00 } -Next token is token 'a' (0x56a90a30 'a') -Shifting token 'a' (0x56a90a30 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56a90a30 'a') --> $$ = nterm item (0x56a90a30 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x56a90a60->Object::Object { 0x56a90a00, 0x56a90a30 } -Next token is token 'a' (0x56a90a60 'a') -Shifting token 'a' (0x56a90a60 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56a90a60 'a') --> $$ = nterm item (0x56a90a60 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x56a90a90->Object::Object { 0x56a90a00, 0x56a90a30, 0x56a90a60 } -Next token is token 'a' (0x56a90a90 'a') -Shifting token 'a' (0x56a90a90 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56a90a90 'a') --> $$ = nterm item (0x56a90a90 'a') -Entering state 11 -Stack now 0 11 11 11 11 Reading a token -0x56a90ac0->Object::Object { 0x56a90a00, 0x56a90a30, 0x56a90a60, 0x56a90a90 } -Next token is token 'p' (0x56a90ac0 'p'Exception caught: cleaning lookahead and stack -0x56a90ac0->Object::~Object { 0x56a90a00, 0x56a90a30, 0x56a90a60, 0x56a90a90, 0x56a90ac0 } -0x56a90a90->Object::~Object { 0x56a90a00, 0x56a90a30, 0x56a90a60, 0x56a90a90 } -0x56a90a60->Object::~Object { 0x56a90a00, 0x56a90a30, 0x56a90a60 } -0x56a90a30->Object::~Object { 0x56a90a00, 0x56a90a30 } -0x56a90a00->Object::~Object { 0x56a90a00 } -exception caught: printer -end { } -./c++.at:1360: grep '^exception caught: printer$' stderr -stdout: -exception caught: printer -./c++.at:1360: $PREPARSER ./input aaaae -stderr: -exception caught: syntax error -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaT -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaR -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token 
EOI () +Shifting token EOI () +Entering state 3 +Stack now 0 2 3 +Stack now 0 2 3 +Cleanup: popping token EOI () +Cleanup: popping nterm expr (40) +destroy: 40 +./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:1362: ./exceptions || exit 77 -stderr: -Inner caught -Outer caught -./c++.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:851: $PREPARSER ./input -stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: ./c++.at:1065: $PREPARSER ./input < in @@ -252926,982 +252048,402 @@ ./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -stdout: -./c++.at:857: $PREPARSER ./input -stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: +./c++.at:571: $here/modern stdout: -./c++.at:851: $PREPARSER ./input +Modern C++: 201402 +./c++.at:571: $PREPARSER ./list stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:1066: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid 
character -caught error -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./c++.at:1066: ./check -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:572: $here/modern stdout: -./c++.at:857: $PREPARSER ./input +Modern C++: 201402 +./c++.at:572: $PREPARSER ./list stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: ./c++.at:1360: $PREPARSER ./input aaaas stderr: exception caught: reduction ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:1361: $PREPARSER ./input aaaas ./c++.at:1360: $PREPARSER ./input aaaal stderr: +exception caught: reduction +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1361: $PREPARSER ./input aaaal exception caught: yylex +stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: yylex +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input i ./c++.at:1360: $PREPARSER ./input i stderr: +stderr: exception caught: initial-action ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input aaaap +exception caught: initial-action stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaap ./c++.at:1360: $PREPARSER ./input --debug aaaap stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x580d7a00->Object::Object { } -Next token is token 'a' (0x580d7a00 'a') -Shifting token 'a' (0x580d7a00 'a') -Entering state 2 -Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x580d7a00 'a') --> $$ = nterm item (0x580d7a00 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x580d7a30->Object::Object { 0x580d7a00 } -Next token is token 'a' (0x580d7a30 'a') -Shifting 
token 'a' (0x580d7a30 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x580d7a30 'a') --> $$ = nterm item (0x580d7a30 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x580d7a60->Object::Object { 0x580d7a00, 0x580d7a30 } -Next token is token 'a' (0x580d7a60 'a') -Shifting token 'a' (0x580d7a60 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x580d7a60 'a') --> $$ = nterm item (0x580d7a60 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x580d7a90->Object::Object { 0x580d7a00, 0x580d7a30, 0x580d7a60 } -Next token is token 'a' (0x580d7a90 'a') -Shifting token 'a' (0x580d7a90 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x580d7a90 'a') --> $$ = nterm item (0x580d7a90 'a') -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x580d7ac0->Object::Object { 0x580d7a00, 0x580d7a30, 0x580d7a60, 0x580d7a90 } -Next token is token 'p' (0x580d7ac0 'p'Exception caught: cleaning lookahead and stack -0x580d7ac0->Object::~Object { 0x580d7a00, 0x580d7a30, 0x580d7a60, 0x580d7a90, 0x580d7ac0 } -0x580d7a90->Object::~Object { 0x580d7a00, 0x580d7a30, 0x580d7a60, 0x580d7a90 } -0x580d7a60->Object::~Object { 0x580d7a00, 0x580d7a30, 0x580d7a60 } -0x580d7a30->Object::~Object { 0x580d7a00, 0x580d7a30 } -0x580d7a00->Object::~Object { 0x580d7a00 } -exception caught: printer -end { } -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./c++.at:1361: $PREPARSER ./input --debug aaaap Starting parse Entering state 0 Stack now 0 Reading a token -0x580d7a00->Object::Object { } -Next token is token 'a' (0x580d7a00 'a') -Shifting token 'a' (0x580d7a00 'a') +0x57506a00->Object::Object { } +Next token is token 'a' (0x57506a00 'a') +Shifting token 'a' (0x57506a00 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x580d7a00 'a') --> $$ = nterm item (0x580d7a00 'a') + $1 = token 'a' (0x57506a00 'a') +-> $$ = nterm item (0x57506a00 'a') Entering state 11 Stack now 0 11 Reading a token -0x580d7a30->Object::Object { 0x580d7a00 } -Next token is token 'a' (0x580d7a30 'a') -Shifting token 'a' (0x580d7a30 'a') +0x57506a30->Object::Object { 0x57506a00 } +Next token is token 'a' (0x57506a30 'a') +Shifting token 'a' (0x57506a30 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x580d7a30 'a') --> $$ = nterm item (0x580d7a30 'a') + $1 = token 'a' (0x57506a30 'a') +-> $$ = nterm item (0x57506a30 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x580d7a60->Object::Object { 0x580d7a00, 0x580d7a30 } -Next token is token 'a' (0x580d7a60 'a') -Shifting token 'a' (0x580d7a60 'a') +0x57506a60->Object::Object { 0x57506a00, 0x57506a30 } +Next token is token 'a' (0x57506a60 'a') +Shifting token 'a' (0x57506a60 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x580d7a60 'a') --> $$ = nterm item (0x580d7a60 'a') + $1 = token 'a' (0x57506a60 'a') +-> $$ = nterm item (0x57506a60 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x580d7a90->Object::Object { 0x580d7a00, 0x580d7a30, 0x580d7a60 } -Next token is token 'a' (0x580d7a90 'a') -Shifting token 'a' (0x580d7a90 'a') +0x57506a90->Object::Object { 0x57506a00, 0x57506a30, 0x57506a60 } +Next token is token 'a' (0x57506a90 'a') +Shifting token 'a' 
(0x57506a90 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x580d7a90 'a') --> $$ = nterm item (0x580d7a90 'a') + $1 = token 'a' (0x57506a90 'a') +-> $$ = nterm item (0x57506a90 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x580d7ac0->Object::Object { 0x580d7a00, 0x580d7a30, 0x580d7a60, 0x580d7a90 } -Next token is token 'p' (0x580d7ac0 'p'Exception caught: cleaning lookahead and stack -0x580d7ac0->Object::~Object { 0x580d7a00, 0x580d7a30, 0x580d7a60, 0x580d7a90, 0x580d7ac0 } -0x580d7a90->Object::~Object { 0x580d7a00, 0x580d7a30, 0x580d7a60, 0x580d7a90 } -0x580d7a60->Object::~Object { 0x580d7a00, 0x580d7a30, 0x580d7a60 } -0x580d7a30->Object::~Object { 0x580d7a00, 0x580d7a30 } -0x580d7a00->Object::~Object { 0x580d7a00 } +0x57506ac0->Object::Object { 0x57506a00, 0x57506a30, 0x57506a60, 0x57506a90 } +Next token is token 'p' (0x57506ac0 'p'Exception caught: cleaning lookahead and stack +0x57506ac0->Object::~Object { 0x57506a00, 0x57506a30, 0x57506a60, 0x57506a90, 0x57506ac0 } +0x57506a90->Object::~Object { 0x57506a00, 0x57506a30, 0x57506a60, 0x57506a90 } +0x57506a60->Object::~Object { 0x57506a00, 0x57506a30, 0x57506a60 } +0x57506a30->Object::~Object { 0x57506a00, 0x57506a30 } +0x57506a00->Object::~Object { 0x57506a00 } exception caught: printer end { } -./c++.at:1360: grep '^exception caught: printer$' stderr -stdout: -exception caught: printer -./c++.at:1360: $PREPARSER ./input aaaae -stderr: -stderr: -exception caught: syntax error -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:1361: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1360: $PREPARSER ./input aaaaE -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaT -./c++.at:1361: $PREPARSER ./input aaaal -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1360: $PREPARSER ./input aaaaR -exception caught: yylex -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1361: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaap -stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input --debug aaaap -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x57653a00->Object::Object { } -Next token is token 'a' (0x57653a00 'a') -Shifting token 'a' (0x57653a00 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57653a00 'a') --> $$ = nterm item (0x57653a00 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x57653a30->Object::Object { 0x57653a00 } -Next token is token 'a' (0x57653a30 'a') -Shifting token 'a' (0x57653a30 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57653a30 'a') --> $$ = nterm item (0x57653a30 'a') -Entering state 10 -Stack now 0 10 10 -Reading 
a token -0x57653a60->Object::Object { 0x57653a00, 0x57653a30 } -Next token is token 'a' (0x57653a60 'a') -Shifting token 'a' (0x57653a60 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57653a60 'a') --> $$ = nterm item (0x57653a60 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x57653a90->Object::Object { 0x57653a00, 0x57653a30, 0x57653a60 } -Next token is token 'a' (0x57653a90 'a') -Shifting token 'a' (0x57653a90 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57653a90 'a') --> $$ = nterm item (0x57653a90 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x57653ac0->Object::Object { 0x57653a00, 0x57653a30, 0x57653a60, 0x57653a90 } -Next token is token 'p' (0x57653ac0 'p'Exception caught: cleaning lookahead and stack -0x57653ac0->Object::~Object { 0x57653a00, 0x57653a30, 0x57653a60, 0x57653a90, 0x57653ac0 } -0x57653a90->Object::~Object { 0x57653a00, 0x57653a30, 0x57653a60, 0x57653a90 } -0x57653a60->Object::~Object { 0x57653a00, 0x57653a30, 0x57653a60 } -0x57653a30->Object::~Object { 0x57653a00, 0x57653a30 } -0x57653a00->Object::~Object { 0x57653a00 } -exception caught: printer -end { } -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x57653a00->Object::Object { } -Next token is token 'a' (0x57653a00 'a') -Shifting token 'a' (0x57653a00 'a') +0x57feea00->Object::Object { } +Next token is token 'a' (0x57feea00 'a') +Shifting token 'a' (0x57feea00 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57653a00 'a') --> $$ = nterm item (0x57653a00 'a') + $1 = token 'a' (0x57feea00 'a') +-> $$ = nterm item (0x57feea00 'a') Entering state 10 Stack now 0 10 Reading a token -0x57653a30->Object::Object { 0x57653a00 } -Next token is token 'a' (0x57653a30 'a') -Shifting token 'a' (0x57653a30 'a') +0x57feea30->Object::Object { 0x57feea00 } +Next token is token 'a' (0x57feea30 'a') +Shifting token 'a' (0x57feea30 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57653a30 'a') --> $$ = nterm item (0x57653a30 'a') + $1 = token 'a' (0x57feea30 'a') +-> $$ = nterm item (0x57feea30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x57653a60->Object::Object { 0x57653a00, 0x57653a30 } -Next token is token 'a' (0x57653a60 'a') -Shifting token 'a' (0x57653a60 'a') +0x57feea60->Object::Object { 0x57feea00, 0x57feea30 } +Next token is token 'a' (0x57feea60 'a') +Shifting token 'a' (0x57feea60 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57653a60 'a') --> $$ = nterm item (0x57653a60 'a') + $1 = token 'a' (0x57feea60 'a') +-> $$ = nterm item (0x57feea60 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x57653a90->Object::Object { 0x57653a00, 0x57653a30, 0x57653a60 } -Next token is token 'a' (0x57653a90 'a') -Shifting token 'a' (0x57653a90 'a') +0x57feea90->Object::Object { 0x57feea00, 0x57feea30, 0x57feea60 } +Next token is token 'a' (0x57feea90 'a') +Shifting token 'a' (0x57feea90 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57653a90 'a') --> $$ = nterm item (0x57653a90 'a') + $1 = token 'a' (0x57feea90 'a') +-> $$ = nterm item (0x57feea90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x57653ac0->Object::Object { 0x57653a00, 0x57653a30, 
0x57653a60, 0x57653a90 } -Next token is token 'p' (0x57653ac0 'p'Exception caught: cleaning lookahead and stack -0x57653ac0->Object::~Object { 0x57653a00, 0x57653a30, 0x57653a60, 0x57653a90, 0x57653ac0 } -0x57653a90->Object::~Object { 0x57653a00, 0x57653a30, 0x57653a60, 0x57653a90 } -0x57653a60->Object::~Object { 0x57653a00, 0x57653a30, 0x57653a60 } -0x57653a30->Object::~Object { 0x57653a00, 0x57653a30 } -0x57653a00->Object::~Object { 0x57653a00 } -exception caught: printer -end { } -./c++.at:1361: grep '^exception caught: printer$' stderr -stdout: -exception caught: printer -./c++.at:1361: $PREPARSER ./input aaaae -stderr: -exception caught: syntax error -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaT -stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaR -stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:851: $PREPARSER ./input -stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:857: $PREPARSER ./input -stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1362: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaap -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input --debug aaaap -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xff94e70f->Object::Object { } -0xff94e7b8->Object::Object { 0xff94e70f } -0xff94e70f->Object::~Object { 0xff94e70f, 0xff94e7b8 } -Next token is token 'a' (0xff94e7b8 'a') -0xff94e708->Object::Object { 0xff94e7b8 } -0xff94e7b8->Object::~Object { 0xff94e708, 0xff94e7b8 } -Shifting token 'a' (0xff94e708 'a') -0x5760f3c4->Object::Object { 0xff94e708 } -0xff94e708->Object::~Object { 0x5760f3c4, 0xff94e708 } -Entering state 2 -Stack now 0 2 -0xff94e7c8->Object::Object { 0x5760f3c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5760f3c4 'a') --> $$ = nterm item (0xff94e7c8 'a') -0x5760f3c4->Object::~Object { 0x5760f3c4, 0xff94e7c8 } -0x5760f3c4->Object::Object { 0xff94e7c8 } -0xff94e7c8->Object::~Object { 0x5760f3c4, 0xff94e7c8 } -Entering state 11 -Stack now 0 11 -Reading a token -0xff94e70f->Object::Object { 0x5760f3c4 } -0xff94e7b8->Object::Object { 0x5760f3c4, 0xff94e70f } -0xff94e70f->Object::~Object { 0x5760f3c4, 
0xff94e70f, 0xff94e7b8 } -Next token is token 'a' (0xff94e7b8 'a') -0xff94e708->Object::Object { 0x5760f3c4, 0xff94e7b8 } -0xff94e7b8->Object::~Object { 0x5760f3c4, 0xff94e708, 0xff94e7b8 } -Shifting token 'a' (0xff94e708 'a') -0x5760f3d4->Object::Object { 0x5760f3c4, 0xff94e708 } -0xff94e708->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0xff94e708 } -Entering state 2 -Stack now 0 11 2 -0xff94e7c8->Object::Object { 0x5760f3c4, 0x5760f3d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5760f3d4 'a') --> $$ = nterm item (0xff94e7c8 'a') -0x5760f3d4->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0xff94e7c8 } -0x5760f3d4->Object::Object { 0x5760f3c4, 0xff94e7c8 } -0xff94e7c8->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0xff94e7c8 } -Entering state 11 -Stack now 0 11 11 -Reading a token -0xff94e70f->Object::Object { 0x5760f3c4, 0x5760f3d4 } -0xff94e7b8->Object::Object { 0x5760f3c4, 0x5760f3d4, 0xff94e70f } -0xff94e70f->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0xff94e70f, 0xff94e7b8 } -Next token is token 'a' (0xff94e7b8 'a') -0xff94e708->Object::Object { 0x5760f3c4, 0x5760f3d4, 0xff94e7b8 } -0xff94e7b8->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0xff94e708, 0xff94e7b8 } -Shifting token 'a' (0xff94e708 'a') -0x5760f3e4->Object::Object { 0x5760f3c4, 0x5760f3d4, 0xff94e708 } -0xff94e708->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e708 } -Entering state 2 -Stack now 0 11 11 2 -0xff94e7c8->Object::Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5760f3e4 'a') --> $$ = nterm item (0xff94e7c8 'a') -0x5760f3e4->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e7c8 } -0x5760f3e4->Object::Object { 0x5760f3c4, 0x5760f3d4, 0xff94e7c8 } -0xff94e7c8->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e7c8 } -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0xff94e70f->Object::Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4 } -0xff94e7b8->Object::Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e70f } -0xff94e70f->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e70f, 0xff94e7b8 } -Next token is token 'a' (0xff94e7b8 'a') -0xff94e708->Object::Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e7b8 } -0xff94e7b8->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e708, 0xff94e7b8 } -Shifting token 'a' (0xff94e708 'a') -0x5760f3f4->Object::Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e708 } -0xff94e708->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0x5760f3f4, 0xff94e708 } -Entering state 2 -Stack now 0 11 11 11 2 -0xff94e7c8->Object::Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0x5760f3f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5760f3f4 'a') --> $$ = nterm item (0xff94e7c8 'a') -0x5760f3f4->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0x5760f3f4, 0xff94e7c8 } -0x5760f3f4->Object::Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e7c8 } -0xff94e7c8->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0x5760f3f4, 0xff94e7c8 } -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0xff94e70f->Object::Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0x5760f3f4 } -0xff94e7b8->Object::Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0x5760f3f4, 0xff94e70f } -0xff94e70f->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0x5760f3f4, 0xff94e70f, 0xff94e7b8 } -Next token is token 'p' (0xff94e7b8 'p'Exception caught: cleaning lookahead and stack -0x5760f3f4->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0x5760f3f4, 0xff94e7b8 } 
-0x5760f3e4->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e7b8 } -0x5760f3d4->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0xff94e7b8 } -0x5760f3c4->Object::~Object { 0x5760f3c4, 0xff94e7b8 } -0xff94e7b8->Object::~Object { 0xff94e7b8 } -exception caught: printer -end { } -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xff94e70f->Object::Object { } -0xff94e7b8->Object::Object { 0xff94e70f } -0xff94e70f->Object::~Object { 0xff94e70f, 0xff94e7b8 } -Next token is token 'a' (0xff94e7b8 'a') -0xff94e708->Object::Object { 0xff94e7b8 } -0xff94e7b8->Object::~Object { 0xff94e708, 0xff94e7b8 } -Shifting token 'a' (0xff94e708 'a') -0x5760f3c4->Object::Object { 0xff94e708 } -0xff94e708->Object::~Object { 0x5760f3c4, 0xff94e708 } -Entering state 2 -Stack now 0 2 -0xff94e7c8->Object::Object { 0x5760f3c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5760f3c4 'a') --> $$ = nterm item (0xff94e7c8 'a') -0x5760f3c4->Object::~Object { 0x5760f3c4, 0xff94e7c8 } -0x5760f3c4->Object::Object { 0xff94e7c8 } -0xff94e7c8->Object::~Object { 0x5760f3c4, 0xff94e7c8 } -Entering state 11 -Stack now 0 11 -Reading a token -0xff94e70f->Object::Object { 0x5760f3c4 } -0xff94e7b8->Object::Object { 0x5760f3c4, 0xff94e70f } -0xff94e70f->Object::~Object { 0x5760f3c4, 0xff94e70f, 0xff94e7b8 } -Next token is token 'a' (0xff94e7b8 'a') -0xff94e708->Object::Object { 0x5760f3c4, 0xff94e7b8 } -0xff94e7b8->Object::~Object { 0x5760f3c4, 0xff94e708, 0xff94e7b8 } -Shifting token 'a' (0xff94e708 'a') -0x5760f3d4->Object::Object { 0x5760f3c4, 0xff94e708 } -0xff94e708->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0xff94e708 } -Entering state 2 -Stack now 0 11 2 -0xff94e7c8->Object::Object { 0x5760f3c4, 0x5760f3d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5760f3d4 'a') --> $$ = nterm item (0xff94e7c8 'a') -0x5760f3d4->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0xff94e7c8 } -0x5760f3d4->Object::Object { 0x5760f3c4, 0xff94e7c8 } -0xff94e7c8->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0xff94e7c8 } -Entering state 11 -Stack now 0 11 11 -Reading a token -0xff94e70f->Object::Object { 0x5760f3c4, 0x5760f3d4 } -0xff94e7b8->Object::Object { 0x5760f3c4, 0x5760f3d4, 0xff94e70f } -0xff94e70f->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0xff94e70f, 0xff94e7b8 } -Next token is token 'a' (0xff94e7b8 'a') -0xff94e708->Object::Object { 0x5760f3c4, 0x5760f3d4, 0xff94e7b8 } -0xff94e7b8->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0xff94e708, 0xff94e7b8 } -Shifting token 'a' (0xff94e708 'a') -0x5760f3e4->Object::Object { 0x5760f3c4, 0x5760f3d4, 0xff94e708 } -0xff94e708->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e708 } -Entering state 2 -Stack now 0 11 11 2 -0xff94e7c8->Object::Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5760f3e4 'a') --> $$ = nterm item (0xff94e7c8 'a') -0x5760f3e4->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e7c8 } -0x5760f3e4->Object::Object { 0x5760f3c4, 0x5760f3d4, 0xff94e7c8 } -0xff94e7c8->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e7c8 } -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0xff94e70f->Object::Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4 } -0xff94e7b8->Object::Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e70f } -0xff94e70f->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e70f, 0xff94e7b8 } -Next token is token 'a' (0xff94e7b8 'a') 
-0xff94e708->Object::Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e7b8 } -0xff94e7b8->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e708, 0xff94e7b8 } -Shifting token 'a' (0xff94e708 'a') -0x5760f3f4->Object::Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e708 } -0xff94e708->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0x5760f3f4, 0xff94e708 } -Entering state 2 -Stack now 0 11 11 11 2 -0xff94e7c8->Object::Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0x5760f3f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5760f3f4 'a') --> $$ = nterm item (0xff94e7c8 'a') -0x5760f3f4->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0x5760f3f4, 0xff94e7c8 } -0x5760f3f4->Object::Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e7c8 } -0xff94e7c8->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0x5760f3f4, 0xff94e7c8 } -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0xff94e70f->Object::Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0x5760f3f4 } -0xff94e7b8->Object::Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0x5760f3f4, 0xff94e70f } -0xff94e70f->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0x5760f3f4, 0xff94e70f, 0xff94e7b8 } -Next token is token 'p' (0xff94e7b8 'p'Exception caught: cleaning lookahead and stack -0x5760f3f4->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0x5760f3f4, 0xff94e7b8 } -0x5760f3e4->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0x5760f3e4, 0xff94e7b8 } -0x5760f3d4->Object::~Object { 0x5760f3c4, 0x5760f3d4, 0xff94e7b8 } -0x5760f3c4->Object::~Object { 0x5760f3c4, 0xff94e7b8 } -0xff94e7b8->Object::~Object { 0xff94e7b8 } +0x57feeac0->Object::Object { 0x57feea00, 0x57feea30, 0x57feea60, 0x57feea90 } +Next token is token 'p' (0x57feeac0 'p'Exception caught: cleaning lookahead and stack +0x57feeac0->Object::~Object { 0x57feea00, 0x57feea30, 0x57feea60, 0x57feea90, 0x57feeac0 } +0x57feea90->Object::~Object { 0x57feea00, 0x57feea30, 0x57feea60, 0x57feea90 } +0x57feea60->Object::~Object { 0x57feea00, 0x57feea30, 0x57feea60 } +0x57feea30->Object::~Object { 0x57feea00, 0x57feea30 } +0x57feea00->Object::~Object { 0x57feea00 } exception caught: printer end { } -./c++.at:1362: grep '^exception caught: printer$' stderr -stdout: -exception caught: printer -./c++.at:1362: $PREPARSER ./input aaaae -stderr: -exception caught: syntax error -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaE -stderr: -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -stdout: -./c++.at:1065: $PREPARSER ./input < in -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaT -stderr: -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaR -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for 
summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -stdout: -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -stdout: -./c++.at:857: $PREPARSER ./input -stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:851: $PREPARSER ./input -stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1360: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./c++.at:1361: $PREPARSER ./input aaaas -./c++.at:1360: $PREPARSER ./input aaaap -stderr: -exception caught: reduction -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaal -./c++.at:1360: $PREPARSER ./input --debug aaaap -stderr: -exception caught: yylex ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x56643a00->Object::Object { } -Next token is token 'a' (0x56643a00 'a') -Shifting token 'a' (0x56643a00 'a') -Entering state 2 -Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56643a00 'a') --> $$ = nterm item (0x56643a00 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x56643a30->Object::Object { 0x56643a00 } -Next token is token 'a' (0x56643a30 'a') -Shifting token 'a' (0x56643a30 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56643a30 'a') --> $$ = nterm item (0x56643a30 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x56643a60->Object::Object { 0x56643a00, 0x56643a30 } -Next token is token 'a' (0x56643a60 'a') -Shifting token 'a' (0x56643a60 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56643a60 'a') --> $$ = nterm item (0x56643a60 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x56643a90->Object::Object { 0x56643a00, 0x56643a30, 0x56643a60 } -Next token is token 'a' (0x56643a90 'a') -Shifting token 'a' (0x56643a90 'a') -Entering state 
2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56643a90 'a') --> $$ = nterm item (0x56643a90 'a') -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x56643ac0->Object::Object { 0x56643a00, 0x56643a30, 0x56643a60, 0x56643a90 } -Next token is token 'p' (0x56643ac0 'p'Exception caught: cleaning lookahead and stack -0x56643ac0->Object::~Object { 0x56643a00, 0x56643a30, 0x56643a60, 0x56643a90, 0x56643ac0 } -0x56643a90->Object::~Object { 0x56643a00, 0x56643a30, 0x56643a60, 0x56643a90 } -0x56643a60->Object::~Object { 0x56643a00, 0x56643a30, 0x56643a60 } -0x56643a30->Object::~Object { 0x56643a00, 0x56643a30 } -0x56643a00->Object::~Object { 0x56643a00 } -exception caught: printer -end { } -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x56643a00->Object::Object { } -Next token is token 'a' (0x56643a00 'a') -Shifting token 'a' (0x56643a00 'a') +0x57506a00->Object::Object { } +Next token is token 'a' (0x57506a00 'a') +Shifting token 'a' (0x57506a00 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56643a00 'a') --> $$ = nterm item (0x56643a00 'a') + $1 = token 'a' (0x57506a00 'a') +-> $$ = nterm item (0x57506a00 'a') Entering state 11 Stack now 0 11 Reading a token -0x56643a30->Object::Object { 0x56643a00 } -Next token is token 'a' (0x56643a30 'a') -Shifting token 'a' (0x56643a30 'a') +0x57506a30->Object::Object { 0x57506a00 } +Next token is token 'a' (0x57506a30 'a') +Shifting token 'a' (0x57506a30 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56643a30 'a') --> $$ = nterm item (0x56643a30 'a') + $1 = token 'a' (0x57506a30 'a') +-> $$ = nterm item (0x57506a30 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x56643a60->Object::Object { 0x56643a00, 0x56643a30 } -Next token is token 'a' (0x56643a60 'a') -Shifting token 'a' (0x56643a60 'a') +0x57506a60->Object::Object { 0x57506a00, 0x57506a30 } +Next token is token 'a' (0x57506a60 'a') +Shifting token 'a' (0x57506a60 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56643a60 'a') --> $$ = nterm item (0x56643a60 'a') + $1 = token 'a' (0x57506a60 'a') +-> $$ = nterm item (0x57506a60 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x56643a90->Object::Object { 0x56643a00, 0x56643a30, 0x56643a60 } -Next token is token 'a' (0x56643a90 'a') -Shifting token 'a' (0x56643a90 'a') +0x57506a90->Object::Object { 0x57506a00, 0x57506a30, 0x57506a60 } +Next token is token 'a' (0x57506a90 'a') +Shifting token 'a' (0x57506a90 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56643a90 'a') --> $$ = nterm item (0x56643a90 'a') + $1 = token 'a' (0x57506a90 'a') +-> $$ = nterm item (0x57506a90 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x56643ac0->Object::Object { 0x56643a00, 0x56643a30, 0x56643a60, 0x56643a90 } -Next token is token 'p' (0x56643ac0 'p'Exception caught: cleaning lookahead and stack -0x56643ac0->Object::~Object { 0x56643a00, 0x56643a30, 0x56643a60, 0x56643a90, 0x56643ac0 } -0x56643a90->Object::~Object { 0x56643a00, 0x56643a30, 0x56643a60, 0x56643a90 } -0x56643a60->Object::~Object { 0x56643a00, 0x56643a30, 0x56643a60 } -0x56643a30->Object::~Object { 0x56643a00, 0x56643a30 } -0x56643a00->Object::~Object { 0x56643a00 } +0x57506ac0->Object::Object { 0x57506a00, 0x57506a30, 
0x57506a60, 0x57506a90 } +Next token is token 'p' (0x57506ac0 'p'Exception caught: cleaning lookahead and stack +0x57506ac0->Object::~Object { 0x57506a00, 0x57506a30, 0x57506a60, 0x57506a90, 0x57506ac0 } +0x57506a90->Object::~Object { 0x57506a00, 0x57506a30, 0x57506a60, 0x57506a90 } +0x57506a60->Object::~Object { 0x57506a00, 0x57506a30, 0x57506a60 } +0x57506a30->Object::~Object { 0x57506a00, 0x57506a30 } +0x57506a00->Object::~Object { 0x57506a00 } exception caught: printer end { } ./c++.at:1360: grep '^exception caught: printer$' stderr -./c++.at:1361: $PREPARSER ./input i stdout: -exception caught: printer stderr: +exception caught: printer ./c++.at:1360: $PREPARSER ./input aaaae -exception caught: initial-action -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -exception caught: syntax error -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaap -stderr: -./c++.at:1360: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input --debug aaaap -./c++.at:1360: $PREPARSER ./input aaaaT -stderr: -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -0x581c6a00->Object::Object { } -Next token is token 'a' (0x581c6a00 'a') -Shifting token 'a' (0x581c6a00 'a') +0x57feea00->Object::Object { } +Next token is token 'a' (0x57feea00 'a') +Shifting token 'a' (0x57feea00 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x581c6a00 'a') --> $$ = nterm item (0x581c6a00 'a') + $1 = token 'a' (0x57feea00 'a') +-> $$ = nterm item (0x57feea00 'a') Entering state 10 Stack now 0 10 Reading a token -0x581c6a30->Object::Object { 0x581c6a00 } -Next token is token 'a' (0x581c6a30 'a') -Shifting token 'a' (0x581c6a30 'a') +0x57feea30->Object::Object { 0x57feea00 } +Next token is token 'a' (0x57feea30 'a') +Shifting token 'a' (0x57feea30 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x581c6a30 'a') --> $$ = nterm item (0x581c6a30 'a') + $1 = token 'a' (0x57feea30 'a') +-> $$ = nterm item (0x57feea30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x581c6a60->Object::Object { 0x581c6a00, 0x581c6a30 } -Next token is token 'a' (0x581c6a60 'a') -Shifting token 'a' (0x581c6a60 'a') +0x57feea60->Object::Object { 0x57feea00, 0x57feea30 } +Next token is token 'a' (0x57feea60 'a') +Shifting token 'a' (0x57feea60 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x581c6a60 'a') --> $$ = nterm item (0x581c6a60 'a') + $1 = token 'a' (0x57feea60 'a') +-> $$ = nterm item (0x57feea60 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x581c6a90->Object::Object { 0x581c6a00, 0x581c6a30, 0x581c6a60 } -Next token is token 'a' (0x581c6a90 'a') -Shifting token 'a' (0x581c6a90 'a') +0x57feea90->Object::Object { 0x57feea00, 0x57feea30, 0x57feea60 } +Next token is token 'a' (0x57feea90 'a') +Shifting token 'a' (0x57feea90 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x581c6a90 'a') --> $$ = nterm item (0x581c6a90 'a') + $1 = token 'a' (0x57feea90 'a') +-> $$ = nterm item (0x57feea90 'a') 
Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x581c6ac0->Object::Object { 0x581c6a00, 0x581c6a30, 0x581c6a60, 0x581c6a90 } -Next token is token 'p' (0x581c6ac0 'p'Exception caught: cleaning lookahead and stack -0x581c6ac0->Object::~Object { 0x581c6a00, 0x581c6a30, 0x581c6a60, 0x581c6a90, 0x581c6ac0 } -0x581c6a90->Object::~Object { 0x581c6a00, 0x581c6a30, 0x581c6a60, 0x581c6a90 } -0x581c6a60->Object::~Object { 0x581c6a00, 0x581c6a30, 0x581c6a60 } -0x581c6a30->Object::~Object { 0x581c6a00, 0x581c6a30 } -0x581c6a00->Object::~Object { 0x581c6a00 } +0x57feeac0->Object::Object { 0x57feea00, 0x57feea30, 0x57feea60, 0x57feea90 } +Next token is token 'p' (0x57feeac0 'p'Exception caught: cleaning lookahead and stack +0x57feeac0->Object::~Object { 0x57feea00, 0x57feea30, 0x57feea60, 0x57feea90, 0x57feeac0 } +0x57feea90->Object::~Object { 0x57feea00, 0x57feea30, 0x57feea60, 0x57feea90 } +0x57feea60->Object::~Object { 0x57feea00, 0x57feea30, 0x57feea60 } +0x57feea30->Object::~Object { 0x57feea00, 0x57feea30 } +0x57feea00->Object::~Object { 0x57feea00 } exception caught: printer end { } -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaR stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x581c6a00->Object::Object { } -Next token is token 'a' (0x581c6a00 'a') -Shifting token 'a' (0x581c6a00 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x581c6a00 'a') --> $$ = nterm item (0x581c6a00 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x581c6a30->Object::Object { 0x581c6a00 } -Next token is token 'a' (0x581c6a30 'a') -Shifting token 'a' (0x581c6a30 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x581c6a30 'a') --> $$ = nterm item (0x581c6a30 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x581c6a60->Object::Object { 0x581c6a00, 0x581c6a30 } -Next token is token 'a' (0x581c6a60 'a') -Shifting token 'a' (0x581c6a60 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x581c6a60 'a') --> $$ = nterm item (0x581c6a60 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x581c6a90->Object::Object { 0x581c6a00, 0x581c6a30, 0x581c6a60 } -Next token is token 'a' (0x581c6a90 'a') -Shifting token 'a' (0x581c6a90 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x581c6a90 'a') --> $$ = nterm item (0x581c6a90 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x581c6ac0->Object::Object { 0x581c6a00, 0x581c6a30, 0x581c6a60, 0x581c6a90 } -Next token is token 'p' (0x581c6ac0 'p'Exception caught: cleaning lookahead and stack -0x581c6ac0->Object::~Object { 0x581c6a00, 0x581c6a30, 0x581c6a60, 0x581c6a90, 0x581c6ac0 } -0x581c6a90->Object::~Object { 0x581c6a00, 0x581c6a30, 0x581c6a60, 0x581c6a90 } -0x581c6a60->Object::~Object { 0x581c6a00, 0x581c6a30, 0x581c6a60 } -0x581c6a30->Object::~Object { 0x581c6a00, 0x581c6a30 } -0x581c6a00->Object::~Object { 0x581c6a00 } -exception caught: printer -end { } ./c++.at:1361: grep '^exception caught: printer$' stderr -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -======== Testing with C++ standard flags: '' exception caught: printer ./c++.at:1361: $PREPARSER ./input aaaae -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +exception caught: syntax error 
+./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: exception caught: syntax error +./c++.at:1360: $PREPARSER ./input aaaaE ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stdout: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:569: $here/modern +stdout: ./c++.at:1361: $PREPARSER ./input aaaaE +Modern C++: 201402 +./c++.at:1360: $PREPARSER ./input aaaaT +./c++.at:569: $PREPARSER ./list +stderr: stderr: exception caught: syntax error, unexpected end of file, expecting 'a' +stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:1360: $PREPARSER ./input aaaaR +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input aaaaT stderr: +stderr: +======== Testing with C++ standard flags: '' +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:1361: $PREPARSER ./input aaaaR stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -253909,715 +252451,611 @@ ./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:857: $PREPARSER ./input +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stdout: stderr: +./c++.at:850: $PREPARSER ./input stdout: -./c++.at:1066: $PREPARSER ./input < in +./c++.at:235: $PREPARSER ./list stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -error: invalid expression -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in +======== Testing with C++ standard flags: '' +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +======== Testing with C++ standard flags: '' stderr: -error: invalid character -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:566: $here/modern +stdout: +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +Modern C++: 201703 +./c++.at:566: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: 
"0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:1066: ./check -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:851: $PREPARSER ./input +./c++.at:856: $PREPARSER ./input stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:573: $here/modern +stdout: +Modern C++: 201402 +./c++.at:573: $PREPARSER ./list stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: +./c++.at:574: $here/modern stdout: -./c++.at:1362: $PREPARSER ./input aaaas +Modern C++: 201402 +./c++.at:574: $PREPARSER ./list +stderr: +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:857: $PREPARSER ./input +./c++.at:1362: $PREPARSER ./input aaaas stderr: exception caught: reduction -./c++.at:1064: $PREPARSER ./input < in -stderr: -678. 
c++.at:848: ok -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./c++.at:1064: $PREPARSER ./input < in -./c++.at:1065: $PREPARSER ./input < in -stderr: -stderr: -error: invalid expression ./c++.at:1362: $PREPARSER ./input aaaal -error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:858: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: exception caught: yylex -./c++.at:1065: $PREPARSER ./input < in -./c++.at:1064: $PREPARSER ./input < in ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -688. c++.at:1363: testing Exception safety without error recovery api.value.type=variant ... -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS -stderr: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stderr: -stderr: -error: invalid character -stdout: -./c++.at:1361: $PREPARSER ./input aaaas ./c++.at:1362: $PREPARSER ./input i -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -exception caught: reduction stderr: exception caught: initial-action -682. c++.at:1064: ok -683. c++.at:1065: ok - - -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaal ./c++.at:1362: $PREPARSER ./input aaaap stderr: -stdout: -./c++.at:1360: $PREPARSER ./input aaaas -690. c++.at:1422: testing Shared locations ... -689. c++.at:1371: testing C++ GLR parser identifier shadowing ... 
-./c++.at:1410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:1456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o x1.cc x1.yy -stderr: -exception caught: reduction -stderr: -exception caught: yylex -stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS ./c++.at:1362: $PREPARSER ./input --debug aaaap -./c++.at:1360: $PREPARSER ./input aaaal -./c++.at:1361: $PREPARSER ./input i -./c++.at:1456: $CXX $CPPFLAGS $CXXFLAGS -Iinclude -c -o x1.o x1.cc -stderr: -======== Testing with C++ standard flags: '' -stderr: -exception caught: yylex stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -exception caught: initial-action -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -0xffa3774f->Object::Object { } -0xffa377f8->Object::Object { 0xffa3774f } -0xffa3774f->Object::~Object { 0xffa3774f, 0xffa377f8 } -Next token is token 'a' (0xffa377f8 'a') -0xffa37760->Object::Object { 0xffa377f8 } -0xffa376eb->Object::Object { 0xffa37760, 0xffa377f8 } -0xffa376eb->Object::~Object { 0xffa376eb, 0xffa37760, 0xffa377f8 } -0xffa377f8->Object::~Object { 0xffa37760, 0xffa377f8 } -Shifting token 'a' (0xffa37760 'a') -0x56e1f3c4->Object::Object { 0xffa37760 } -0xffa376ef->Object::Object { 0x56e1f3c4, 0xffa37760 } -0xffa376ef->Object::~Object { 0x56e1f3c4, 0xffa376ef, 0xffa37760 } -0xffa37760->Object::~Object { 0x56e1f3c4, 0xffa37760 } +0xffa1e8bf->Object::Object { } +0xffa1e968->Object::Object { 0xffa1e8bf } +0xffa1e8bf->Object::~Object { 0xffa1e8bf, 0xffa1e968 } +Next token is token 'a' (0xffa1e968 'a') +0xffa1e8b8->Object::Object { 0xffa1e968 } +0xffa1e968->Object::~Object { 0xffa1e8b8, 0xffa1e968 } +Shifting token 'a' (0xffa1e8b8 'a') +0x56c883c4->Object::Object { 0xffa1e8b8 } +0xffa1e8b8->Object::~Object { 0x56c883c4, 0xffa1e8b8 } Entering state 2 Stack now 0 2 -0xffa37808->Object::Object { 0x56e1f3c4 } +0xffa1e978->Object::Object { 0x56c883c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e1f3c4 'a') --> $$ = nterm item (0xffa37808 'a') -0x56e1f3c4->Object::~Object { 0x56e1f3c4, 0xffa37808 } -0x56e1f3c4->Object::Object { 0xffa37808 } -0xffa3774f->Object::Object { 0x56e1f3c4, 0xffa37808 } -0xffa3774f->Object::~Object { 0x56e1f3c4, 0xffa3774f, 0xffa37808 } -0xffa37808->Object::~Object { 0x56e1f3c4, 0xffa37808 } + $1 = token 'a' (0x56c883c4 'a') +-> $$ = nterm item (0xffa1e978 'a') +0x56c883c4->Object::~Object { 0x56c883c4, 0xffa1e978 } +0x56c883c4->Object::Object { 0xffa1e978 } +0xffa1e978->Object::~Object { 0x56c883c4, 0xffa1e978 } Entering state 11 Stack now 0 11 Reading a token -0xffa3774f->Object::Object { 0x56e1f3c4 } -0xffa377f8->Object::Object { 0x56e1f3c4, 0xffa3774f } -0xffa3774f->Object::~Object { 0x56e1f3c4, 0xffa3774f, 0xffa377f8 } -Next token is token 'a' (0xffa377f8 'a') -0xffa37760->Object::Object { 0x56e1f3c4, 0xffa377f8 } -0xffa376eb->Object::Object { 0x56e1f3c4, 0xffa37760, 0xffa377f8 } -0xffa376eb->Object::~Object { 0x56e1f3c4, 0xffa376eb, 0xffa37760, 
0xffa377f8 } -0xffa377f8->Object::~Object { 0x56e1f3c4, 0xffa37760, 0xffa377f8 } -Shifting token 'a' (0xffa37760 'a') -0x56e1f3d4->Object::Object { 0x56e1f3c4, 0xffa37760 } -0xffa376ef->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa37760 } -0xffa376ef->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa376ef, 0xffa37760 } -0xffa37760->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa37760 } +0xffa1e8bf->Object::Object { 0x56c883c4 } +0xffa1e968->Object::Object { 0x56c883c4, 0xffa1e8bf } +0xffa1e8bf->Object::~Object { 0x56c883c4, 0xffa1e8bf, 0xffa1e968 } +Next token is token 'a' (0xffa1e968 'a') +0xffa1e8b8->Object::Object { 0x56c883c4, 0xffa1e968 } +0xffa1e968->Object::~Object { 0x56c883c4, 0xffa1e8b8, 0xffa1e968 } +Shifting token 'a' (0xffa1e8b8 'a') +0x56c883d4->Object::Object { 0x56c883c4, 0xffa1e8b8 } +0xffa1e8b8->Object::~Object { 0x56c883c4, 0x56c883d4, 0xffa1e8b8 } Entering state 2 Stack now 0 11 2 -0xffa37808->Object::Object { 0x56e1f3c4, 0x56e1f3d4 } +0xffa1e978->Object::Object { 0x56c883c4, 0x56c883d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e1f3d4 'a') --> $$ = nterm item (0xffa37808 'a') -0x56e1f3d4->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa37808 } -0x56e1f3d4->Object::Object { 0x56e1f3c4, 0xffa37808 } -0xffa3774f->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa37808 } -0xffa3774f->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa3774f, 0xffa37808 } -0xffa37808->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa37808 } + $1 = token 'a' (0x56c883d4 'a') +-> $$ = nterm item (0xffa1e978 'a') +0x56c883d4->Object::~Object { 0x56c883c4, 0x56c883d4, 0xffa1e978 } +0x56c883d4->Object::Object { 0x56c883c4, 0xffa1e978 } +0xffa1e978->Object::~Object { 0x56c883c4, 0x56c883d4, 0xffa1e978 } Entering state 11 Stack now 0 11 11 Reading a token -0xffa3774f->Object::Object { 0x56e1f3c4, 0x56e1f3d4 } -0xffa377f8->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa3774f } -0xffa3774f->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa3774f, 0xffa377f8 } -Next token is token 'a' (0xffa377f8 'a') -0xffa37760->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa377f8 } -0xffa376eb->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa37760, 0xffa377f8 } -0xffa376eb->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa376eb, 0xffa37760, 0xffa377f8 } -0xffa377f8->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa37760, 0xffa377f8 } -Shifting token 'a' (0xffa37760 'a') -0x56e1f3e4->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa37760 } -0xffa376ef->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa37760 } -0xffa376ef->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa376ef, 0xffa37760 } -0xffa37760->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa37760 } +0xffa1e8bf->Object::Object { 0x56c883c4, 0x56c883d4 } +0xffa1e968->Object::Object { 0x56c883c4, 0x56c883d4, 0xffa1e8bf } +0xffa1e8bf->Object::~Object { 0x56c883c4, 0x56c883d4, 0xffa1e8bf, 0xffa1e968 } +Next token is token 'a' (0xffa1e968 'a') +0xffa1e8b8->Object::Object { 0x56c883c4, 0x56c883d4, 0xffa1e968 } +0xffa1e968->Object::~Object { 0x56c883c4, 0x56c883d4, 0xffa1e8b8, 0xffa1e968 } +Shifting token 'a' (0xffa1e8b8 'a') +0x56c883e4->Object::Object { 0x56c883c4, 0x56c883d4, 0xffa1e8b8 } +0xffa1e8b8->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e8b8 } Entering state 2 Stack now 0 11 11 2 -0xffa37808->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4 } +0xffa1e978->Object::Object { 0x56c883c4, 0x56c883d4, 0x56c883e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e1f3e4 'a') --> $$ = nterm item (0xffa37808 'a') 
-0x56e1f3e4->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa37808 } -0x56e1f3e4->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa37808 } -0xffa3774f->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa37808 } -0xffa3774f->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa3774f, 0xffa37808 } -0xffa37808->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa37808 } + $1 = token 'a' (0x56c883e4 'a') +-> $$ = nterm item (0xffa1e978 'a') +0x56c883e4->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e978 } +0x56c883e4->Object::Object { 0x56c883c4, 0x56c883d4, 0xffa1e978 } +0xffa1e978->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e978 } Entering state 11 Stack now 0 11 11 11 Reading a token -0xffa3774f->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4 } -0xffa377f8->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa3774f } -0xffa3774f->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa3774f, 0xffa377f8 } -Next token is token 'a' (0xffa377f8 'a') -0xffa37760->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa377f8 } -0xffa376eb->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa37760, 0xffa377f8 } -0xffa376eb->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa376eb, 0xffa37760, 0xffa377f8 } -0xffa377f8->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa37760, 0xffa377f8 } -Shifting token 'a' (0xffa37760 'a') -0x56e1f3f4->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa37760 } -0xffa376ef->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa37760 } -0xffa376ef->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa376ef, 0xffa37760 } -0xffa37760->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa37760 } +0xffa1e8bf->Object::Object { 0x56c883c4, 0x56c883d4, 0x56c883e4 } +0xffa1e968->Object::Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e8bf } +0xffa1e8bf->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e8bf, 0xffa1e968 } +Next token is token 'a' (0xffa1e968 'a') +0xffa1e8b8->Object::Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e968 } +0xffa1e968->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e8b8, 0xffa1e968 } +Shifting token 'a' (0xffa1e8b8 'a') +0x56c883f4->Object::Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e8b8 } +0xffa1e8b8->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0x56c883f4, 0xffa1e8b8 } Entering state 2 Stack now 0 11 11 11 2 -0xffa37808->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4 } +0xffa1e978->Object::Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0x56c883f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e1f3f4 'a') --> $$ = nterm item (0xffa37808 'a') -0x56e1f3f4->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa37808 } -0x56e1f3f4->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa37808 } -0xffa3774f->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa37808 } -0xffa3774f->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa3774f, 0xffa37808 } -0xffa37808->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa37808 } + $1 = token 'a' (0x56c883f4 'a') +-> $$ = nterm item (0xffa1e978 'a') +0x56c883f4->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0x56c883f4, 0xffa1e978 } +0x56c883f4->Object::Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e978 } +0xffa1e978->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0x56c883f4, 0xffa1e978 
} Entering state 11 Stack now 0 11 11 11 11 Reading a token -0xffa3774f->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4 } -0xffa377f8->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa3774f } -0xffa3774f->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa3774f, 0xffa377f8 } -Next token is token 'p' (0xffa377f8 'p'Exception caught: cleaning lookahead and stack -0x56e1f3f4->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa377f8 } -0x56e1f3e4->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa377f8 } -0x56e1f3d4->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa377f8 } -0x56e1f3c4->Object::~Object { 0x56e1f3c4, 0xffa377f8 } -0xffa377f8->Object::~Object { 0xffa377f8 } +0xffa1e8bf->Object::Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0x56c883f4 } +0xffa1e968->Object::Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0x56c883f4, 0xffa1e8bf } +0xffa1e8bf->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0x56c883f4, 0xffa1e8bf, 0xffa1e968 } +Next token is token 'p' (0xffa1e968 'p'Exception caught: cleaning lookahead and stack +0x56c883f4->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0x56c883f4, 0xffa1e968 } +0x56c883e4->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e968 } +0x56c883d4->Object::~Object { 0x56c883c4, 0x56c883d4, 0xffa1e968 } +0x56c883c4->Object::~Object { 0x56c883c4, 0xffa1e968 } +0xffa1e968->Object::~Object { 0xffa1e968 } exception caught: printer end { } ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input i stderr: -./c++.at:1361: $PREPARSER ./input aaaap Starting parse Entering state 0 Stack now 0 Reading a token -0xffa3774f->Object::Object { } -0xffa377f8->Object::Object { 0xffa3774f } -0xffa3774f->Object::~Object { 0xffa3774f, 0xffa377f8 } -Next token is token 'a' (0xffa377f8 'a') -0xffa37760->Object::Object { 0xffa377f8 } -0xffa376eb->Object::Object { 0xffa37760, 0xffa377f8 } -0xffa376eb->Object::~Object { 0xffa376eb, 0xffa37760, 0xffa377f8 } -0xffa377f8->Object::~Object { 0xffa37760, 0xffa377f8 } -Shifting token 'a' (0xffa37760 'a') -0x56e1f3c4->Object::Object { 0xffa37760 } -0xffa376ef->Object::Object { 0x56e1f3c4, 0xffa37760 } -0xffa376ef->Object::~Object { 0x56e1f3c4, 0xffa376ef, 0xffa37760 } -0xffa37760->Object::~Object { 0x56e1f3c4, 0xffa37760 } +0xffa1e8bf->Object::Object { } +0xffa1e968->Object::Object { 0xffa1e8bf } +0xffa1e8bf->Object::~Object { 0xffa1e8bf, 0xffa1e968 } +Next token is token 'a' (0xffa1e968 'a') +0xffa1e8b8->Object::Object { 0xffa1e968 } +0xffa1e968->Object::~Object { 0xffa1e8b8, 0xffa1e968 } +Shifting token 'a' (0xffa1e8b8 'a') +0x56c883c4->Object::Object { 0xffa1e8b8 } +0xffa1e8b8->Object::~Object { 0x56c883c4, 0xffa1e8b8 } Entering state 2 Stack now 0 2 -0xffa37808->Object::Object { 0x56e1f3c4 } +0xffa1e978->Object::Object { 0x56c883c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e1f3c4 'a') --> $$ = nterm item (0xffa37808 'a') -0x56e1f3c4->Object::~Object { 0x56e1f3c4, 0xffa37808 } -0x56e1f3c4->Object::Object { 0xffa37808 } -0xffa3774f->Object::Object { 0x56e1f3c4, 0xffa37808 } -0xffa3774f->Object::~Object { 0x56e1f3c4, 0xffa3774f, 0xffa37808 } -0xffa37808->Object::~Object { 0x56e1f3c4, 0xffa37808 } + $1 = token 'a' (0x56c883c4 'a') +-> $$ = nterm item (0xffa1e978 'a') +0x56c883c4->Object::~Object { 0x56c883c4, 0xffa1e978 } +0x56c883c4->Object::Object { 0xffa1e978 } +0xffa1e978->Object::~Object { 0x56c883c4, 0xffa1e978 } Entering state 11 Stack now 0 11 Reading 
a token -0xffa3774f->Object::Object { 0x56e1f3c4 } -0xffa377f8->Object::Object { 0x56e1f3c4, 0xffa3774f } -0xffa3774f->Object::~Object { 0x56e1f3c4, 0xffa3774f, 0xffa377f8 } -Next token is token 'a' (0xffa377f8 'a') -0xffa37760->Object::Object { 0x56e1f3c4, 0xffa377f8 } -0xffa376eb->Object::Object { 0x56e1f3c4, 0xffa37760, 0xffa377f8 } -0xffa376eb->Object::~Object { 0x56e1f3c4, 0xffa376eb, 0xffa37760, 0xffa377f8 } -0xffa377f8->Object::~Object { 0x56e1f3c4, 0xffa37760, 0xffa377f8 } -Shifting token 'a' (0xffa37760 'a') -0x56e1f3d4->Object::Object { 0x56e1f3c4, 0xffa37760 } -0xffa376ef->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa37760 } -0xffa376ef->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa376ef, 0xffa37760 } -0xffa37760->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa37760 } +0xffa1e8bf->Object::Object { 0x56c883c4 } +0xffa1e968->Object::Object { 0x56c883c4, 0xffa1e8bf } +0xffa1e8bf->Object::~Object { 0x56c883c4, 0xffa1e8bf, 0xffa1e968 } +Next token is token 'a' (0xffa1e968 'a') +0xffa1e8b8->Object::Object { 0x56c883c4, 0xffa1e968 } +0xffa1e968->Object::~Object { 0x56c883c4, 0xffa1e8b8, 0xffa1e968 } +Shifting token 'a' (0xffa1e8b8 'a') +0x56c883d4->Object::Object { 0x56c883c4, 0xffa1e8b8 } +0xffa1e8b8->Object::~Object { 0x56c883c4, 0x56c883d4, 0xffa1e8b8 } Entering state 2 Stack now 0 11 2 -0xffa37808->Object::Object { 0x56e1f3c4, 0x56e1f3d4 } +0xffa1e978->Object::Object { 0x56c883c4, 0x56c883d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e1f3d4 'a') --> $$ = nterm item (0xffa37808 'a') -0x56e1f3d4->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa37808 } -0x56e1f3d4->Object::Object { 0x56e1f3c4, 0xffa37808 } -0xffa3774f->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa37808 } -0xffa3774f->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa3774f, 0xffa37808 } -0xffa37808->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa37808 } + $1 = token 'a' (0x56c883d4 'a') +-> $$ = nterm item (0xffa1e978 'a') +0x56c883d4->Object::~Object { 0x56c883c4, 0x56c883d4, 0xffa1e978 } +0x56c883d4->Object::Object { 0x56c883c4, 0xffa1e978 } +0xffa1e978->Object::~Object { 0x56c883c4, 0x56c883d4, 0xffa1e978 } Entering state 11 Stack now 0 11 11 Reading a token -0xffa3774f->Object::Object { 0x56e1f3c4, 0x56e1f3d4 } -0xffa377f8->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa3774f } -0xffa3774f->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa3774f, 0xffa377f8 } -Next token is token 'a' (0xffa377f8 'a') -0xffa37760->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa377f8 } -0xffa376eb->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa37760, 0xffa377f8 } -0xffa376eb->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa376eb, 0xffa37760, 0xffa377f8 } -0xffa377f8->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa37760, 0xffa377f8 } -Shifting token 'a' (0xffa37760 'a') -0x56e1f3e4->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa37760 } -0xffa376ef->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa37760 } -0xffa376ef->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa376ef, 0xffa37760 } -0xffa37760->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa37760 } +0xffa1e8bf->Object::Object { 0x56c883c4, 0x56c883d4 } +0xffa1e968->Object::Object { 0x56c883c4, 0x56c883d4, 0xffa1e8bf } +0xffa1e8bf->Object::~Object { 0x56c883c4, 0x56c883d4, 0xffa1e8bf, 0xffa1e968 } +Next token is token 'a' (0xffa1e968 'a') +0xffa1e8b8->Object::Object { 0x56c883c4, 0x56c883d4, 0xffa1e968 } +0xffa1e968->Object::~Object { 0x56c883c4, 0x56c883d4, 0xffa1e8b8, 0xffa1e968 } +Shifting token 'a' (0xffa1e8b8 'a') 
+0x56c883e4->Object::Object { 0x56c883c4, 0x56c883d4, 0xffa1e8b8 } +0xffa1e8b8->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e8b8 } Entering state 2 Stack now 0 11 11 2 -0xffa37808->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4 } +0xffa1e978->Object::Object { 0x56c883c4, 0x56c883d4, 0x56c883e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e1f3e4 'a') --> $$ = nterm item (0xffa37808 'a') -0x56e1f3e4->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa37808 } -0x56e1f3e4->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa37808 } -0xffa3774f->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa37808 } -0xffa3774f->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa3774f, 0xffa37808 } -0xffa37808->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa37808 } + $1 = token 'a' (0x56c883e4 'a') +-> $$ = nterm item (0xffa1e978 'a') +0x56c883e4->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e978 } +0x56c883e4->Object::Object { 0x56c883c4, 0x56c883d4, 0xffa1e978 } +0xffa1e978->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e978 } Entering state 11 Stack now 0 11 11 11 Reading a token -0xffa3774f->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4 } -0xffa377f8->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa3774f } -0xffa3774f->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa3774f, 0xffa377f8 } -Next token is token 'a' (0xffa377f8 'a') -0xffa37760->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa377f8 } -0xffa376eb->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa37760, 0xffa377f8 } -0xffa376eb->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa376eb, 0xffa37760, 0xffa377f8 } -0xffa377f8->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa37760, 0xffa377f8 } -Shifting token 'a' (0xffa37760 'a') -0x56e1f3f4->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa37760 } -0xffa376ef->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa37760 } -0xffa376ef->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa376ef, 0xffa37760 } -0xffa37760->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa37760 } +0xffa1e8bf->Object::Object { 0x56c883c4, 0x56c883d4, 0x56c883e4 } +0xffa1e968->Object::Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e8bf } +0xffa1e8bf->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e8bf, 0xffa1e968 } +Next token is token 'a' (0xffa1e968 'a') +0xffa1e8b8->Object::Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e968 } +0xffa1e968->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e8b8, 0xffa1e968 } +Shifting token 'a' (0xffa1e8b8 'a') +0x56c883f4->Object::Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e8b8 } +0xffa1e8b8->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0x56c883f4, 0xffa1e8b8 } Entering state 2 Stack now 0 11 11 11 2 -0xffa37808->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4 } +0xffa1e978->Object::Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0x56c883f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e1f3f4 'a') --> $$ = nterm item (0xffa37808 'a') -0x56e1f3f4->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa37808 } -0x56e1f3f4->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa37808 } -0xffa3774f->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa37808 } -0xffa3774f->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa3774f, 0xffa37808 
} -0xffa37808->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa37808 } + $1 = token 'a' (0x56c883f4 'a') +-> $$ = nterm item (0xffa1e978 'a') +0x56c883f4->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0x56c883f4, 0xffa1e978 } +0x56c883f4->Object::Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e978 } +0xffa1e978->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0x56c883f4, 0xffa1e978 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0xffa3774f->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4 } -0xffa377f8->Object::Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa3774f } -0xffa3774f->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa3774f, 0xffa377f8 } -Next token is token 'p' (0xffa377f8 'p'Exception caught: cleaning lookahead and stack -0x56e1f3f4->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0x56e1f3f4, 0xffa377f8 } -0x56e1f3e4->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0x56e1f3e4, 0xffa377f8 } -0x56e1f3d4->Object::~Object { 0x56e1f3c4, 0x56e1f3d4, 0xffa377f8 } -0x56e1f3c4->Object::~Object { 0x56e1f3c4, 0xffa377f8 } -0xffa377f8->Object::~Object { 0xffa377f8 } +0xffa1e8bf->Object::Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0x56c883f4 } +0xffa1e968->Object::Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0x56c883f4, 0xffa1e8bf } +0xffa1e8bf->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0x56c883f4, 0xffa1e8bf, 0xffa1e968 } +Next token is token 'p' (0xffa1e968 'p'Exception caught: cleaning lookahead and stack +0x56c883f4->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0x56c883f4, 0xffa1e968 } +0x56c883e4->Object::~Object { 0x56c883c4, 0x56c883d4, 0x56c883e4, 0xffa1e968 } +0x56c883d4->Object::~Object { 0x56c883c4, 0x56c883d4, 0xffa1e968 } +0x56c883c4->Object::~Object { 0x56c883c4, 0xffa1e968 } +0xffa1e968->Object::~Object { 0xffa1e968 } exception caught: printer end { } ./c++.at:1362: grep '^exception caught: printer$' stderr +stdout: +exception caught: printer +./c++.at:1362: $PREPARSER ./input aaaae stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: syntax error +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaT +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +./c++.at:1363: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaR +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaal +======== Testing with C++ standard flags: '' +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +exception caught: yylex +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input i +stderr: exception caught: initial-action -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -exception caught: printer -./c++.at:1361: $PREPARSER ./input --debug aaaap -./c++.at:1362: $PREPARSER ./input aaaae -./c++.at:1360: $PREPARSER ./input aaaap +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge 
mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaap +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input --debug aaaap stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x5822ca00->Object::Object { } -Next token is token 'a' (0x5822ca00 'a') -Shifting token 'a' (0x5822ca00 'a') +0xffdc42bf->Object::Object { } +0xffdc4368->Object::Object { 0xffdc42bf } +0xffdc42bf->Object::~Object { 0xffdc42bf, 0xffdc4368 } +Next token is token 'a' (0xffdc4368 'a') +0xffdc42b8->Object::Object { 0xffdc4368 } +0xffdc4368->Object::~Object { 0xffdc42b8, 0xffdc4368 } +Shifting token 'a' (0xffdc42b8 'a') +0x5780f3c4->Object::Object { 0xffdc42b8 } +0xffdc42b8->Object::~Object { 0x5780f3c4, 0xffdc42b8 } Entering state 1 Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5822ca00 'a') --> $$ = nterm item (0x5822ca00 'a') +0xffdc4378->Object::Object { 0x5780f3c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5780f3c4 'a') +-> $$ = nterm item (0xffdc4378 'a') +0x5780f3c4->Object::~Object { 0x5780f3c4, 0xffdc4378 } +0x5780f3c4->Object::Object { 0xffdc4378 } +0xffdc4378->Object::~Object { 0x5780f3c4, 0xffdc4378 } Entering state 10 Stack now 0 10 Reading a token -0x5822ca30->Object::Object { 0x5822ca00 } -Next token is token 'a' (0x5822ca30 'a') -Shifting token 'a' (0x5822ca30 'a') +0xffdc42bf->Object::Object { 0x5780f3c4 } +0xffdc4368->Object::Object { 0x5780f3c4, 0xffdc42bf } +0xffdc42bf->Object::~Object { 0x5780f3c4, 0xffdc42bf, 0xffdc4368 } +Next token is token 'a' (0xffdc4368 'a') +0xffdc42b8->Object::Object { 0x5780f3c4, 0xffdc4368 } +0xffdc4368->Object::~Object { 0x5780f3c4, 0xffdc42b8, 0xffdc4368 } +Shifting token 'a' (0xffdc42b8 'a') +0x5780f3d4->Object::Object { 0x5780f3c4, 0xffdc42b8 } +0xffdc42b8->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0xffdc42b8 } Entering state 1 Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5822ca30 'a') --> $$ = nterm item (0x5822ca30 'a') +0xffdc4378->Object::Object { 0x5780f3c4, 0x5780f3d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5780f3d4 'a') +-> $$ = nterm item (0xffdc4378 'a') +0x5780f3d4->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0xffdc4378 } +0x5780f3d4->Object::Object { 0x5780f3c4, 0xffdc4378 } +0xffdc4378->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0xffdc4378 } Entering state 10 Stack now 0 10 10 Reading a token -0x5822ca60->Object::Object { 0x5822ca00, 0x5822ca30 } -Next token is token 'a' (0x5822ca60 'a') -Shifting token 'a' (0x5822ca60 'a') +0xffdc42bf->Object::Object { 0x5780f3c4, 0x5780f3d4 } +0xffdc4368->Object::Object { 0x5780f3c4, 0x5780f3d4, 0xffdc42bf } +0xffdc42bf->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0xffdc42bf, 0xffdc4368 } +Next token is token 'a' (0xffdc4368 'a') +0xffdc42b8->Object::Object { 0x5780f3c4, 0x5780f3d4, 0xffdc4368 } +0xffdc4368->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0xffdc42b8, 0xffdc4368 } +Shifting token 'a' (0xffdc42b8 'a') +0x5780f3e4->Object::Object { 0x5780f3c4, 0x5780f3d4, 0xffdc42b8 } +0xffdc42b8->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0xffdc42b8 } Entering state 1 Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5822ca60 'a') --> $$ = nterm item (0x5822ca60 'a') +0xffdc4378->Object::Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5780f3e4 'a') +-> $$ = nterm item (0xffdc4378 'a') +0x5780f3e4->Object::~Object { 0x5780f3c4, 0x5780f3d4, 
0x5780f3e4, 0xffdc4378 } +0x5780f3e4->Object::Object { 0x5780f3c4, 0x5780f3d4, 0xffdc4378 } +0xffdc4378->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0xffdc4378 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x5822ca90->Object::Object { 0x5822ca00, 0x5822ca30, 0x5822ca60 } -Next token is token 'a' (0x5822ca90 'a') -Shifting token 'a' (0x5822ca90 'a') +0xffdc42bf->Object::Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4 } +0xffdc4368->Object::Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0xffdc42bf } +0xffdc42bf->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0xffdc42bf, 0xffdc4368 } +Next token is token 'a' (0xffdc4368 'a') +0xffdc42b8->Object::Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0xffdc4368 } +0xffdc4368->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0xffdc42b8, 0xffdc4368 } +Shifting token 'a' (0xffdc42b8 'a') +0x5780f3f4->Object::Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0xffdc42b8 } +0xffdc42b8->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0x5780f3f4, 0xffdc42b8 } Entering state 1 Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5822ca90 'a') --> $$ = nterm item (0x5822ca90 'a') +0xffdc4378->Object::Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0x5780f3f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5780f3f4 'a') +-> $$ = nterm item (0xffdc4378 'a') +0x5780f3f4->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0x5780f3f4, 0xffdc4378 } +0x5780f3f4->Object::Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0xffdc4378 } +0xffdc4378->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0x5780f3f4, 0xffdc4378 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x5822cac0->Object::Object { 0x5822ca00, 0x5822ca30, 0x5822ca60, 0x5822ca90 } -Next token is token 'p' (0x5822cac0 'p'Exception caught: cleaning lookahead and stack -0x5822cac0->Object::~Object { 0x5822ca00, 0x5822ca30, 0x5822ca60, 0x5822ca90, 0x5822cac0 } -0x5822ca90->Object::~Object { 0x5822ca00, 0x5822ca30, 0x5822ca60, 0x5822ca90 } -0x5822ca60->Object::~Object { 0x5822ca00, 0x5822ca30, 0x5822ca60 } -0x5822ca30->Object::~Object { 0x5822ca00, 0x5822ca30 } -0x5822ca00->Object::~Object { 0x5822ca00 } +0xffdc42bf->Object::Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0x5780f3f4 } +0xffdc4368->Object::Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0x5780f3f4, 0xffdc42bf } +0xffdc42bf->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0x5780f3f4, 0xffdc42bf, 0xffdc4368 } +Next token is token 'p' (0xffdc4368 'p'Exception caught: cleaning lookahead and stack +0x5780f3f4->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0x5780f3f4, 0xffdc4368 } +0x5780f3e4->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0xffdc4368 } +0x5780f3d4->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0xffdc4368 } +0x5780f3c4->Object::~Object { 0x5780f3c4, 0xffdc4368 } +0xffdc4368->Object::~Object { 0xffdc4368 } exception caught: printer end { } -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -exception caught: syntax error -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -0x5822ca00->Object::Object { } -Next token is token 'a' (0x5822ca00 'a') -Shifting token 'a' (0x5822ca00 'a') +0xffdc42bf->Object::Object { } +0xffdc4368->Object::Object { 0xffdc42bf } +0xffdc42bf->Object::~Object { 0xffdc42bf, 0xffdc4368 } +Next token is 
token 'a' (0xffdc4368 'a') +0xffdc42b8->Object::Object { 0xffdc4368 } +0xffdc4368->Object::~Object { 0xffdc42b8, 0xffdc4368 } +Shifting token 'a' (0xffdc42b8 'a') +0x5780f3c4->Object::Object { 0xffdc42b8 } +0xffdc42b8->Object::~Object { 0x5780f3c4, 0xffdc42b8 } Entering state 1 Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5822ca00 'a') --> $$ = nterm item (0x5822ca00 'a') +0xffdc4378->Object::Object { 0x5780f3c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5780f3c4 'a') +-> $$ = nterm item (0xffdc4378 'a') +0x5780f3c4->Object::~Object { 0x5780f3c4, 0xffdc4378 } +0x5780f3c4->Object::Object { 0xffdc4378 } +0xffdc4378->Object::~Object { 0x5780f3c4, 0xffdc4378 } Entering state 10 Stack now 0 10 Reading a token -0x5822ca30->Object::Object { 0x5822ca00 } -Next token is token 'a' (0x5822ca30 'a') -Shifting token 'a' (0x5822ca30 'a') +0xffdc42bf->Object::Object { 0x5780f3c4 } +0xffdc4368->Object::Object { 0x5780f3c4, 0xffdc42bf } +0xffdc42bf->Object::~Object { 0x5780f3c4, 0xffdc42bf, 0xffdc4368 } +Next token is token 'a' (0xffdc4368 'a') +0xffdc42b8->Object::Object { 0x5780f3c4, 0xffdc4368 } +0xffdc4368->Object::~Object { 0x5780f3c4, 0xffdc42b8, 0xffdc4368 } +Shifting token 'a' (0xffdc42b8 'a') +0x5780f3d4->Object::Object { 0x5780f3c4, 0xffdc42b8 } +0xffdc42b8->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0xffdc42b8 } Entering state 1 Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5822ca30 'a') --> $$ = nterm item (0x5822ca30 'a') +0xffdc4378->Object::Object { 0x5780f3c4, 0x5780f3d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5780f3d4 'a') +-> $$ = nterm item (0xffdc4378 'a') +0x5780f3d4->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0xffdc4378 } +0x5780f3d4->Object::Object { 0x5780f3c4, 0xffdc4378 } +0xffdc4378->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0xffdc4378 } Entering state 10 Stack now 0 10 10 Reading a token -0x5822ca60->Object::Object { 0x5822ca00, 0x5822ca30 } -Next token is token 'a' (0x5822ca60 'a') -Shifting token 'a' (0x5822ca60 'a') +0xffdc42bf->Object::Object { 0x5780f3c4, 0x5780f3d4 } +0xffdc4368->Object::Object { 0x5780f3c4, 0x5780f3d4, 0xffdc42bf } +0xffdc42bf->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0xffdc42bf, 0xffdc4368 } +Next token is token 'a' (0xffdc4368 'a') +0xffdc42b8->Object::Object { 0x5780f3c4, 0x5780f3d4, 0xffdc4368 } +0xffdc4368->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0xffdc42b8, 0xffdc4368 } +Shifting token 'a' (0xffdc42b8 'a') +0x5780f3e4->Object::Object { 0x5780f3c4, 0x5780f3d4, 0xffdc42b8 } +0xffdc42b8->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0xffdc42b8 } Entering state 1 Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5822ca60 'a') --> $$ = nterm item (0x5822ca60 'a') +0xffdc4378->Object::Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5780f3e4 'a') +-> $$ = nterm item (0xffdc4378 'a') +0x5780f3e4->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0xffdc4378 } +0x5780f3e4->Object::Object { 0x5780f3c4, 0x5780f3d4, 0xffdc4378 } +0xffdc4378->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0xffdc4378 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x5822ca90->Object::Object { 0x5822ca00, 0x5822ca30, 0x5822ca60 } -Next token is token 'a' (0x5822ca90 'a') -Shifting token 'a' (0x5822ca90 'a') +0xffdc42bf->Object::Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4 } +0xffdc4368->Object::Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0xffdc42bf } +0xffdc42bf->Object::~Object 
{ 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0xffdc42bf, 0xffdc4368 } +Next token is token 'a' (0xffdc4368 'a') +0xffdc42b8->Object::Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0xffdc4368 } +0xffdc4368->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0xffdc42b8, 0xffdc4368 } +Shifting token 'a' (0xffdc42b8 'a') +0x5780f3f4->Object::Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0xffdc42b8 } +0xffdc42b8->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0x5780f3f4, 0xffdc42b8 } Entering state 1 Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5822ca90 'a') --> $$ = nterm item (0x5822ca90 'a') +0xffdc4378->Object::Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0x5780f3f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5780f3f4 'a') +-> $$ = nterm item (0xffdc4378 'a') +0x5780f3f4->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0x5780f3f4, 0xffdc4378 } +0x5780f3f4->Object::Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0xffdc4378 } +0xffdc4378->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0x5780f3f4, 0xffdc4378 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x5822cac0->Object::Object { 0x5822ca00, 0x5822ca30, 0x5822ca60, 0x5822ca90 } -Next token is token 'p' (0x5822cac0 'p'Exception caught: cleaning lookahead and stack -0x5822cac0->Object::~Object { 0x5822ca00, 0x5822ca30, 0x5822ca60, 0x5822ca90, 0x5822cac0 } -0x5822ca90->Object::~Object { 0x5822ca00, 0x5822ca30, 0x5822ca60, 0x5822ca90 } -0x5822ca60->Object::~Object { 0x5822ca00, 0x5822ca30, 0x5822ca60 } -0x5822ca30->Object::~Object { 0x5822ca00, 0x5822ca30 } -0x5822ca00->Object::~Object { 0x5822ca00 } -exception caught: printer -end { } -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: grep '^exception caught: printer$' stderr -stdout: -exception caught: printer -./c++.at:1362: $PREPARSER ./input aaaaE -./c++.at:1361: $PREPARSER ./input aaaae -./c++.at:1360: $PREPARSER ./input --debug aaaap -stderr: -exception caught: syntax error -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x56f46a00->Object::Object { } -Next token is token 'a' (0x56f46a00 'a') -Shifting token 'a' (0x56f46a00 'a') -Entering state 2 -Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56f46a00 'a') --> $$ = nterm item (0x56f46a00 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x56f46a30->Object::Object { 0x56f46a00 } -Next token is token 'a' (0x56f46a30 'a') -Shifting token 'a' (0x56f46a30 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56f46a30 'a') --> $$ = nterm item (0x56f46a30 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x56f46a60->Object::Object { 0x56f46a00, 0x56f46a30 } -Next token is token 'a' (0x56f46a60 'a') -Shifting token 'a' (0x56f46a60 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56f46a60 'a') --> $$ = nterm item (0x56f46a60 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x56f46a90->Object::Object { 0x56f46a00, 0x56f46a30, 0x56f46a60 } -Next token is token 'a' (0x56f46a90 'a') -Shifting token 'a' (0x56f46a90 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 
'a' (0x56f46a90 'a') --> $$ = nterm item (0x56f46a90 'a') -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x56f46ac0->Object::Object { 0x56f46a00, 0x56f46a30, 0x56f46a60, 0x56f46a90 } -Next token is token 'p' (0x56f46ac0 'p'Exception caught: cleaning lookahead and stack -0x56f46ac0->Object::~Object { 0x56f46a00, 0x56f46a30, 0x56f46a60, 0x56f46a90, 0x56f46ac0 } -0x56f46a90->Object::~Object { 0x56f46a00, 0x56f46a30, 0x56f46a60, 0x56f46a90 } -0x56f46a60->Object::~Object { 0x56f46a00, 0x56f46a30, 0x56f46a60 } -0x56f46a30->Object::~Object { 0x56f46a00, 0x56f46a30 } -0x56f46a00->Object::~Object { 0x56f46a00 } -exception caught: printer -end { } -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaT -./c++.at:1361: $PREPARSER ./input aaaaE -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x56f46a00->Object::Object { } -Next token is token 'a' (0x56f46a00 'a') -Shifting token 'a' (0x56f46a00 'a') -Entering state 2 -Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56f46a00 'a') --> $$ = nterm item (0x56f46a00 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x56f46a30->Object::Object { 0x56f46a00 } -Next token is token 'a' (0x56f46a30 'a') -Shifting token 'a' (0x56f46a30 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56f46a30 'a') --> $$ = nterm item (0x56f46a30 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x56f46a60->Object::Object { 0x56f46a00, 0x56f46a30 } -Next token is token 'a' (0x56f46a60 'a') -Shifting token 'a' (0x56f46a60 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56f46a60 'a') --> $$ = nterm item (0x56f46a60 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x56f46a90->Object::Object { 0x56f46a00, 0x56f46a30, 0x56f46a60 } -Next token is token 'a' (0x56f46a90 'a') -Shifting token 'a' (0x56f46a90 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x56f46a90 'a') --> $$ = nterm item (0x56f46a90 'a') -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x56f46ac0->Object::Object { 0x56f46a00, 0x56f46a30, 0x56f46a60, 0x56f46a90 } -Next token is token 'p' (0x56f46ac0 'p'Exception caught: cleaning lookahead and stack -0x56f46ac0->Object::~Object { 0x56f46a00, 0x56f46a30, 0x56f46a60, 0x56f46a90, 0x56f46ac0 } -0x56f46a90->Object::~Object { 0x56f46a00, 0x56f46a30, 0x56f46a60, 0x56f46a90 } -0x56f46a60->Object::~Object { 0x56f46a00, 0x56f46a30, 0x56f46a60 } -0x56f46a30->Object::~Object { 0x56f46a00, 0x56f46a30 } -0x56f46a00->Object::~Object { 0x56f46a00 } +0xffdc42bf->Object::Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0x5780f3f4 } +0xffdc4368->Object::Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0x5780f3f4, 0xffdc42bf } +0xffdc42bf->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0x5780f3f4, 0xffdc42bf, 0xffdc4368 } +Next token is token 'p' (0xffdc4368 'p'Exception caught: cleaning lookahead and stack +0x5780f3f4->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0x5780f3f4, 0xffdc4368 } +0x5780f3e4->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0x5780f3e4, 0xffdc4368 } +0x5780f3d4->Object::~Object { 0x5780f3c4, 0x5780f3d4, 0xffdc4368 } +0x5780f3c4->Object::~Object { 0x5780f3c4, 0xffdc4368 } +0xffdc4368->Object::~Object { 0xffdc4368 } exception caught: printer end { } -./c++.at:1360: grep '^exception caught: printer$' stderr -exception caught: 
syntax error, unexpected end of file, expecting 'a' -stderr: +./c++.at:1363: grep '^exception caught: printer$' stderr stdout: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr exception caught: printer -./c++.at:1360: $PREPARSER ./input aaaae -./c++.at:1362: $PREPARSER ./input aaaaR +./c++.at:1363: $PREPARSER ./input aaaae stderr: -./c++.at:1361: $PREPARSER ./input aaaaT exception caught: syntax error -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1360: $PREPARSER ./input aaaaE +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaE stderr: -./c++.at:1361: $PREPARSER ./input aaaaR exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaT -======== Testing with C++ standard flags: '' +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaT stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1360: $PREPARSER ./input aaaaR +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaR stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:1363: ./exceptions || exit 77 -stderr: -Inner caught -Outer caught -./c++.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy ======== Testing with C++ standard flags: '' ./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:850: $PREPARSER ./input stderr: -stdout: -./c++.at:858: $PREPARSER ./input -stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1463: sed -ne '/INCLUDED/p;/\\file/{p;n;p;}' include/ast/loc.hh -./c++.at:1471: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o x2.cc x2.yy -./c++.at:1471: $CXX $CPPFLAGS $CXXFLAGS -Iinclude -c -o x2.o x2.cc +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:1066: $PREPARSER ./input < in @@ -254639,15 +253077,79 @@ ./c++.at:1066: $CXX 
$CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./c++.at:858: $PREPARSER ./input stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:856: $PREPARSER ./input +stdout: +./c++.at:659: $PREPARSER ./input +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token NUMBER (1) +Shifting token NUMBER (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 1 (line 34): + $1 = token NUMBER (1) +-> $$ = nterm expr (10) +destroy: 1 +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token NUMBER (30) +Reducing stack by rule 2 (line 35): +-> $$ = nterm @1 (20) +Entering state 4 +Stack now 0 2 4 +Next token is token NUMBER (30) +Shifting token NUMBER (30) +Entering state 5 +Stack now 0 2 4 5 +Reducing stack by rule 3 (line 35): + $1 = nterm expr (10) + $2 = nterm @1 (20) + $3 = token NUMBER (30) +expr: 10 20 30 +-> $$ = nterm expr (40) +destroy: 30 +destroy: 20 +destroy: 10 +Entering state 2 +Stack now 0 2 +Reading a token +Next token is token EOI () +Shifting token EOI () +Entering state 3 +Stack now 0 2 3 +Stack now 0 2 3 +Cleanup: popping token EOI () +Cleanup: popping nterm expr (40) +destroy: 40 +./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:1066: ./check ./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +675. c++.at:584: ok + +689. c++.at:1371: testing C++ GLR parser identifier shadowing ... 
+./c++.at:1410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +stderr: +stdout: +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +======== Testing with C++ standard flags: '' +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: ./c++.at:1361: $PREPARSER ./input aaaas @@ -254671,57 +253173,57 @@ Entering state 0 Stack now 0 Reading a token -0x57adca00->Object::Object { } -Next token is token 'a' (0x57adca00 'a') -Shifting token 'a' (0x57adca00 'a') +0x56dcda00->Object::Object { } +Next token is token 'a' (0x56dcda00 'a') +Shifting token 'a' (0x56dcda00 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57adca00 'a') --> $$ = nterm item (0x57adca00 'a') + $1 = token 'a' (0x56dcda00 'a') +-> $$ = nterm item (0x56dcda00 'a') Entering state 10 Stack now 0 10 Reading a token -0x57adca30->Object::Object { 0x57adca00 } -Next token is token 'a' (0x57adca30 'a') -Shifting token 'a' (0x57adca30 'a') +0x56dcda30->Object::Object { 0x56dcda00 } +Next token is token 'a' (0x56dcda30 'a') +Shifting token 'a' (0x56dcda30 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57adca30 'a') --> $$ = nterm item (0x57adca30 'a') + $1 = token 'a' (0x56dcda30 'a') +-> $$ = nterm item (0x56dcda30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x57adca60->Object::Object { 0x57adca00, 0x57adca30 } -Next token is token 'a' (0x57adca60 'a') -Shifting token 'a' (0x57adca60 'a') +0x56dcda60->Object::Object { 0x56dcda00, 0x56dcda30 } +Next token is token 'a' (0x56dcda60 'a') +Shifting token 'a' (0x56dcda60 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57adca60 'a') --> $$ = nterm item (0x57adca60 'a') + $1 = token 'a' (0x56dcda60 'a') +-> $$ = nterm item (0x56dcda60 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x57adca90->Object::Object { 0x57adca00, 0x57adca30, 0x57adca60 } -Next token is token 'a' (0x57adca90 'a') -Shifting token 'a' (0x57adca90 'a') +0x56dcda90->Object::Object { 0x56dcda00, 0x56dcda30, 0x56dcda60 } +Next token is token 'a' (0x56dcda90 'a') +Shifting token 'a' (0x56dcda90 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57adca90 'a') --> $$ = nterm item (0x57adca90 'a') + $1 = token 'a' (0x56dcda90 'a') +-> $$ = nterm item (0x56dcda90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x57adcac0->Object::Object { 0x57adca00, 0x57adca30, 0x57adca60, 0x57adca90 } -Next token is token 'p' (0x57adcac0 'p'Exception caught: cleaning lookahead and stack -0x57adcac0->Object::~Object { 0x57adca00, 0x57adca30, 0x57adca60, 0x57adca90, 0x57adcac0 } -0x57adca90->Object::~Object { 0x57adca00, 0x57adca30, 0x57adca60, 0x57adca90 } -0x57adca60->Object::~Object { 0x57adca00, 0x57adca30, 0x57adca60 } -0x57adca30->Object::~Object { 0x57adca00, 0x57adca30 } -0x57adca00->Object::~Object { 0x57adca00 } +0x56dcdac0->Object::Object { 0x56dcda00, 0x56dcda30, 0x56dcda60, 0x56dcda90 } +Next token is token 'p' (0x56dcdac0 'p'Exception caught: cleaning lookahead and stack +0x56dcdac0->Object::~Object { 0x56dcda00, 0x56dcda30, 0x56dcda60, 0x56dcda90, 0x56dcdac0 } 
+0x56dcda90->Object::~Object { 0x56dcda00, 0x56dcda30, 0x56dcda60, 0x56dcda90 } +0x56dcda60->Object::~Object { 0x56dcda00, 0x56dcda30, 0x56dcda60 } +0x56dcda30->Object::~Object { 0x56dcda00, 0x56dcda30 } +0x56dcda00->Object::~Object { 0x56dcda00 } exception caught: printer end { } ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -254730,57 +253232,57 @@ Entering state 0 Stack now 0 Reading a token -0x57adca00->Object::Object { } -Next token is token 'a' (0x57adca00 'a') -Shifting token 'a' (0x57adca00 'a') +0x56dcda00->Object::Object { } +Next token is token 'a' (0x56dcda00 'a') +Shifting token 'a' (0x56dcda00 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57adca00 'a') --> $$ = nterm item (0x57adca00 'a') + $1 = token 'a' (0x56dcda00 'a') +-> $$ = nterm item (0x56dcda00 'a') Entering state 10 Stack now 0 10 Reading a token -0x57adca30->Object::Object { 0x57adca00 } -Next token is token 'a' (0x57adca30 'a') -Shifting token 'a' (0x57adca30 'a') +0x56dcda30->Object::Object { 0x56dcda00 } +Next token is token 'a' (0x56dcda30 'a') +Shifting token 'a' (0x56dcda30 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57adca30 'a') --> $$ = nterm item (0x57adca30 'a') + $1 = token 'a' (0x56dcda30 'a') +-> $$ = nterm item (0x56dcda30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x57adca60->Object::Object { 0x57adca00, 0x57adca30 } -Next token is token 'a' (0x57adca60 'a') -Shifting token 'a' (0x57adca60 'a') +0x56dcda60->Object::Object { 0x56dcda00, 0x56dcda30 } +Next token is token 'a' (0x56dcda60 'a') +Shifting token 'a' (0x56dcda60 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57adca60 'a') --> $$ = nterm item (0x57adca60 'a') + $1 = token 'a' (0x56dcda60 'a') +-> $$ = nterm item (0x56dcda60 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x57adca90->Object::Object { 0x57adca00, 0x57adca30, 0x57adca60 } -Next token is token 'a' (0x57adca90 'a') -Shifting token 'a' (0x57adca90 'a') +0x56dcda90->Object::Object { 0x56dcda00, 0x56dcda30, 0x56dcda60 } +Next token is token 'a' (0x56dcda90 'a') +Shifting token 'a' (0x56dcda90 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57adca90 'a') --> $$ = nterm item (0x57adca90 'a') + $1 = token 'a' (0x56dcda90 'a') +-> $$ = nterm item (0x56dcda90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x57adcac0->Object::Object { 0x57adca00, 0x57adca30, 0x57adca60, 0x57adca90 } -Next token is token 'p' (0x57adcac0 'p'Exception caught: cleaning lookahead and stack -0x57adcac0->Object::~Object { 0x57adca00, 0x57adca30, 0x57adca60, 0x57adca90, 0x57adcac0 } -0x57adca90->Object::~Object { 0x57adca00, 0x57adca30, 0x57adca60, 0x57adca90 } -0x57adca60->Object::~Object { 0x57adca00, 0x57adca30, 0x57adca60 } -0x57adca30->Object::~Object { 0x57adca00, 0x57adca30 } -0x57adca00->Object::~Object { 0x57adca00 } +0x56dcdac0->Object::Object { 0x56dcda00, 0x56dcda30, 0x56dcda60, 0x56dcda90 } +Next token is token 'p' (0x56dcdac0 'p'Exception caught: cleaning lookahead and stack +0x56dcdac0->Object::~Object { 0x56dcda00, 0x56dcda30, 0x56dcda60, 0x56dcda90, 0x56dcdac0 } +0x56dcda90->Object::~Object { 0x56dcda00, 0x56dcda30, 0x56dcda60, 0x56dcda90 } +0x56dcda60->Object::~Object { 0x56dcda00, 0x56dcda30, 0x56dcda60 } +0x56dcda30->Object::~Object { 0x56dcda00, 0x56dcda30 } +0x56dcda00->Object::~Object { 0x56dcda00 } 
exception caught: printer end { } ./c++.at:1361: grep '^exception caught: printer$' stderr @@ -254804,225 +253306,116 @@ ./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -======== Testing with C++ standard flags: '' +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: stderr: +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stdout: -./c++.at:1362: $PREPARSER ./input aaaas +./c++.at:1064: $PREPARSER ./input < in stderr: stdout: +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in ./c++.at:1360: $PREPARSER ./input aaaas -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -exception caught: reduction +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: exception caught: reduction -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input aaaal stderr: exception caught: yylex -./c++.at:1362: $PREPARSER ./input i ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -exception caught: initial-action -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input i stderr: exception caught: initial-action ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaap ./c++.at:1360: $PREPARSER ./input aaaap stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input --debug aaaap -stderr: ./c++.at:1360: $PREPARSER ./input --debug aaaap -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xff99418f->Object::Object { } -0xff994238->Object::Object { 0xff99418f } -0xff99418f->Object::~Object { 0xff99418f, 0xff994238 } -Next token is token 'a' (0xff994238 'a') -0xff9941a0->Object::Object { 0xff994238 } -0xff99412b->Object::Object { 0xff9941a0, 0xff994238 } -0xff99412b->Object::~Object { 0xff99412b, 0xff9941a0, 0xff994238 } -0xff994238->Object::~Object { 0xff9941a0, 0xff994238 } -Shifting token 'a' (0xff9941a0 'a') -0x57a873c4->Object::Object { 0xff9941a0 } -0xff99412f->Object::Object { 
0x57a873c4, 0xff9941a0 } -0xff99412f->Object::~Object { 0x57a873c4, 0xff99412f, 0xff9941a0 } -0xff9941a0->Object::~Object { 0x57a873c4, 0xff9941a0 } -Entering state 2 -Stack now 0 2 -0xff994248->Object::Object { 0x57a873c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57a873c4 'a') --> $$ = nterm item (0xff994248 'a') -0x57a873c4->Object::~Object { 0x57a873c4, 0xff994248 } -0x57a873c4->Object::Object { 0xff994248 } -0xff99418f->Object::Object { 0x57a873c4, 0xff994248 } -0xff99418f->Object::~Object { 0x57a873c4, 0xff99418f, 0xff994248 } -0xff994248->Object::~Object { 0x57a873c4, 0xff994248 } -Entering state 11 -Stack now 0 11 -Reading a token -0xff99418f->Object::Object { 0x57a873c4 } -0xff994238->Object::Object { 0x57a873c4, 0xff99418f } -0xff99418f->Object::~Object { 0x57a873c4, 0xff99418f, 0xff994238 } -Next token is token 'a' (0xff994238 'a') -0xff9941a0->Object::Object { 0x57a873c4, 0xff994238 } -0xff99412b->Object::Object { 0x57a873c4, 0xff9941a0, 0xff994238 } -0xff99412b->Object::~Object { 0x57a873c4, 0xff99412b, 0xff9941a0, 0xff994238 } -0xff994238->Object::~Object { 0x57a873c4, 0xff9941a0, 0xff994238 } -Shifting token 'a' (0xff9941a0 'a') -0x57a873d4->Object::Object { 0x57a873c4, 0xff9941a0 } -0xff99412f->Object::Object { 0x57a873c4, 0x57a873d4, 0xff9941a0 } -0xff99412f->Object::~Object { 0x57a873c4, 0x57a873d4, 0xff99412f, 0xff9941a0 } -0xff9941a0->Object::~Object { 0x57a873c4, 0x57a873d4, 0xff9941a0 } -Entering state 2 -Stack now 0 11 2 -0xff994248->Object::Object { 0x57a873c4, 0x57a873d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57a873d4 'a') --> $$ = nterm item (0xff994248 'a') -0x57a873d4->Object::~Object { 0x57a873c4, 0x57a873d4, 0xff994248 } -0x57a873d4->Object::Object { 0x57a873c4, 0xff994248 } -0xff99418f->Object::Object { 0x57a873c4, 0x57a873d4, 0xff994248 } -0xff99418f->Object::~Object { 0x57a873c4, 0x57a873d4, 0xff99418f, 0xff994248 } -0xff994248->Object::~Object { 0x57a873c4, 0x57a873d4, 0xff994248 } -Entering state 11 -Stack now 0 11 11 -Reading a token -0xff99418f->Object::Object { 0x57a873c4, 0x57a873d4 } -0xff994238->Object::Object { 0x57a873c4, 0x57a873d4, 0xff99418f } -0xff99418f->Object::~Object { 0x57a873c4, 0x57a873d4, 0xff99418f, 0xff994238 } -Next token is token 'a' (0xff994238 'a') -0xff9941a0->Object::Object { 0x57a873c4, 0x57a873d4, 0xff994238 } -0xff99412b->Object::Object { 0x57a873c4, 0x57a873d4, 0xff9941a0, 0xff994238 } -0xff99412b->Object::~Object { 0x57a873c4, 0x57a873d4, 0xff99412b, 0xff9941a0, 0xff994238 } -0xff994238->Object::~Object { 0x57a873c4, 0x57a873d4, 0xff9941a0, 0xff994238 } -Shifting token 'a' (0xff9941a0 'a') -0x57a873e4->Object::Object { 0x57a873c4, 0x57a873d4, 0xff9941a0 } -0xff99412f->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff9941a0 } -0xff99412f->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff99412f, 0xff9941a0 } -0xff9941a0->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff9941a0 } -Entering state 2 -Stack now 0 11 11 2 -0xff994248->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57a873e4 'a') --> $$ = nterm item (0xff994248 'a') -0x57a873e4->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff994248 } -0x57a873e4->Object::Object { 0x57a873c4, 0x57a873d4, 0xff994248 } -0xff99418f->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff994248 } -0xff99418f->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff99418f, 0xff994248 } -0xff994248->Object::~Object { 0x57a873c4, 0x57a873d4, 
0x57a873e4, 0xff994248 } -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0xff99418f->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4 } -0xff994238->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff99418f } -0xff99418f->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff99418f, 0xff994238 } -Next token is token 'a' (0xff994238 'a') -0xff9941a0->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff994238 } -0xff99412b->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff9941a0, 0xff994238 } -0xff99412b->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff99412b, 0xff9941a0, 0xff994238 } -0xff994238->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff9941a0, 0xff994238 } -Shifting token 'a' (0xff9941a0 'a') -0x57a873f4->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff9941a0 } -0xff99412f->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff9941a0 } -0xff99412f->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff99412f, 0xff9941a0 } -0xff9941a0->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff9941a0 } -Entering state 2 -Stack now 0 11 11 11 2 -0xff994248->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57a873f4 'a') --> $$ = nterm item (0xff994248 'a') -0x57a873f4->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff994248 } -0x57a873f4->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff994248 } -0xff99418f->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff994248 } -0xff99418f->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff99418f, 0xff994248 } -0xff994248->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff994248 } -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0xff99418f->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4 } -0xff994238->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff99418f } -0xff99418f->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff99418f, 0xff994238 } -Next token is token 'p' (0xff994238 'p'Exception caught: cleaning lookahead and stack -0x57a873f4->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff994238 } -0x57a873e4->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff994238 } -0x57a873d4->Object::~Object { 0x57a873c4, 0x57a873d4, 0xff994238 } -0x57a873c4->Object::~Object { 0x57a873c4, 0xff994238 } -0xff994238->Object::~Object { 0xff994238 } -exception caught: printer -end { } -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x57984a00->Object::Object { } -Next token is token 'a' (0x57984a00 'a') -Shifting token 'a' (0x57984a00 'a') +0x57660a00->Object::Object { } +Next token is token 'a' (0x57660a00 'a') +Shifting token 'a' (0x57660a00 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57984a00 'a') --> $$ = nterm item (0x57984a00 'a') + $1 = token 'a' (0x57660a00 'a') +-> $$ = nterm item (0x57660a00 'a') Entering state 11 Stack now 0 11 Reading a token -0x57984a30->Object::Object { 0x57984a00 } -Next token is token 'a' (0x57984a30 'a') -Shifting token 'a' (0x57984a30 'a') +0x57660a30->Object::Object { 0x57660a00 } +Next token is token 'a' (0x57660a30 'a') +Shifting token 'a' (0x57660a30 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 
(line 147): - $1 = token 'a' (0x57984a30 'a') --> $$ = nterm item (0x57984a30 'a') + $1 = token 'a' (0x57660a30 'a') +-> $$ = nterm item (0x57660a30 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x57984a60->Object::Object { 0x57984a00, 0x57984a30 } -Next token is token 'a' (0x57984a60 'a') -Shifting token 'a' (0x57984a60 'a') +0x57660a60->Object::Object { 0x57660a00, 0x57660a30 } +Next token is token 'a' (0x57660a60 'a') +Shifting token 'a' (0x57660a60 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57984a60 'a') --> $$ = nterm item (0x57984a60 'a') + $1 = token 'a' (0x57660a60 'a') +-> $$ = nterm item (0x57660a60 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x57984a90->Object::Object { 0x57984a00, 0x57984a30, 0x57984a60 } -Next token is token 'a' (0x57984a90 'a') -Shifting token 'a' (0x57984a90 'a') +0x57660a90->Object::Object { 0x57660a00, 0x57660a30, 0x57660a60 } +Next token is token 'a' (0x57660a90 'a') +Shifting token 'a' (0x57660a90 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57984a90 'a') --> $$ = nterm item (0x57984a90 'a') + $1 = token 'a' (0x57660a90 'a') +-> $$ = nterm item (0x57660a90 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x57984ac0->Object::Object { 0x57984a00, 0x57984a30, 0x57984a60, 0x57984a90 } -Next token is token 'p' (0x57984ac0 'p'Exception caught: cleaning lookahead and stack -0x57984ac0->Object::~Object { 0x57984a00, 0x57984a30, 0x57984a60, 0x57984a90, 0x57984ac0 } -0x57984a90->Object::~Object { 0x57984a00, 0x57984a30, 0x57984a60, 0x57984a90 } -0x57984a60->Object::~Object { 0x57984a00, 0x57984a30, 0x57984a60 } -0x57984a30->Object::~Object { 0x57984a00, 0x57984a30 } -0x57984a00->Object::~Object { 0x57984a00 } +0x57660ac0->Object::Object { 0x57660a00, 0x57660a30, 0x57660a60, 0x57660a90 } +Next token is token 'p' (0x57660ac0 'p'Exception caught: cleaning lookahead and stack +0x57660ac0->Object::~Object { 0x57660a00, 0x57660a30, 0x57660a60, 0x57660a90, 0x57660ac0 } +0x57660a90->Object::~Object { 0x57660a00, 0x57660a30, 0x57660a60, 0x57660a90 } +0x57660a60->Object::~Object { 0x57660a00, 0x57660a30, 0x57660a60 } +0x57660a30->Object::~Object { 0x57660a00, 0x57660a30 } +0x57660a00->Object::~Object { 0x57660a00 } exception caught: printer end { } ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -255031,659 +253424,355 @@ Entering state 0 Stack now 0 Reading a token -0xff99418f->Object::Object { } -0xff994238->Object::Object { 0xff99418f } -0xff99418f->Object::~Object { 0xff99418f, 0xff994238 } -Next token is token 'a' (0xff994238 'a') -0xff9941a0->Object::Object { 0xff994238 } -0xff99412b->Object::Object { 0xff9941a0, 0xff994238 } -0xff99412b->Object::~Object { 0xff99412b, 0xff9941a0, 0xff994238 } -0xff994238->Object::~Object { 0xff9941a0, 0xff994238 } -Shifting token 'a' (0xff9941a0 'a') -0x57a873c4->Object::Object { 0xff9941a0 } -0xff99412f->Object::Object { 0x57a873c4, 0xff9941a0 } -0xff99412f->Object::~Object { 0x57a873c4, 0xff99412f, 0xff9941a0 } -0xff9941a0->Object::~Object { 0x57a873c4, 0xff9941a0 } -Entering state 2 -Stack now 0 2 -0xff994248->Object::Object { 0x57a873c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57a873c4 'a') --> $$ = nterm item (0xff994248 'a') -0x57a873c4->Object::~Object { 0x57a873c4, 0xff994248 } -0x57a873c4->Object::Object { 0xff994248 } -0xff99418f->Object::Object { 0x57a873c4, 0xff994248 } -0xff99418f->Object::~Object { 0x57a873c4, 
0xff99418f, 0xff994248 } -0xff994248->Object::~Object { 0x57a873c4, 0xff994248 } -Entering state 11 -Stack now 0 11 -Reading a token -0xff99418f->Object::Object { 0x57a873c4 } -0xff994238->Object::Object { 0x57a873c4, 0xff99418f } -0xff99418f->Object::~Object { 0x57a873c4, 0xff99418f, 0xff994238 } -Next token is token 'a' (0xff994238 'a') -0xff9941a0->Object::Object { 0x57a873c4, 0xff994238 } -0xff99412b->Object::Object { 0x57a873c4, 0xff9941a0, 0xff994238 } -0xff99412b->Object::~Object { 0x57a873c4, 0xff99412b, 0xff9941a0, 0xff994238 } -0xff994238->Object::~Object { 0x57a873c4, 0xff9941a0, 0xff994238 } -Shifting token 'a' (0xff9941a0 'a') -0x57a873d4->Object::Object { 0x57a873c4, 0xff9941a0 } -0xff99412f->Object::Object { 0x57a873c4, 0x57a873d4, 0xff9941a0 } -0xff99412f->Object::~Object { 0x57a873c4, 0x57a873d4, 0xff99412f, 0xff9941a0 } -0xff9941a0->Object::~Object { 0x57a873c4, 0x57a873d4, 0xff9941a0 } -Entering state 2 -Stack now 0 11 2 -0xff994248->Object::Object { 0x57a873c4, 0x57a873d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57a873d4 'a') --> $$ = nterm item (0xff994248 'a') -0x57a873d4->Object::~Object { 0x57a873c4, 0x57a873d4, 0xff994248 } -0x57a873d4->Object::Object { 0x57a873c4, 0xff994248 } -0xff99418f->Object::Object { 0x57a873c4, 0x57a873d4, 0xff994248 } -0xff99418f->Object::~Object { 0x57a873c4, 0x57a873d4, 0xff99418f, 0xff994248 } -0xff994248->Object::~Object { 0x57a873c4, 0x57a873d4, 0xff994248 } -Entering state 11 -Stack now 0 11 11 -Reading a token -0xff99418f->Object::Object { 0x57a873c4, 0x57a873d4 } -0xff994238->Object::Object { 0x57a873c4, 0x57a873d4, 0xff99418f } -0xff99418f->Object::~Object { 0x57a873c4, 0x57a873d4, 0xff99418f, 0xff994238 } -Next token is token 'a' (0xff994238 'a') -0xff9941a0->Object::Object { 0x57a873c4, 0x57a873d4, 0xff994238 } -0xff99412b->Object::Object { 0x57a873c4, 0x57a873d4, 0xff9941a0, 0xff994238 } -0xff99412b->Object::~Object { 0x57a873c4, 0x57a873d4, 0xff99412b, 0xff9941a0, 0xff994238 } -0xff994238->Object::~Object { 0x57a873c4, 0x57a873d4, 0xff9941a0, 0xff994238 } -Shifting token 'a' (0xff9941a0 'a') -0x57a873e4->Object::Object { 0x57a873c4, 0x57a873d4, 0xff9941a0 } -0xff99412f->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff9941a0 } -0xff99412f->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff99412f, 0xff9941a0 } -0xff9941a0->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff9941a0 } -Entering state 2 -Stack now 0 11 11 2 -0xff994248->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57a873e4 'a') --> $$ = nterm item (0xff994248 'a') -0x57a873e4->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff994248 } -0x57a873e4->Object::Object { 0x57a873c4, 0x57a873d4, 0xff994248 } -0xff99418f->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff994248 } -0xff99418f->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff99418f, 0xff994248 } -0xff994248->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff994248 } -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0xff99418f->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4 } -0xff994238->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff99418f } -0xff99418f->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff99418f, 0xff994238 } -Next token is token 'a' (0xff994238 'a') -0xff9941a0->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff994238 } -0xff99412b->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff9941a0, 0xff994238 } 
-0xff99412b->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff99412b, 0xff9941a0, 0xff994238 } -0xff994238->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff9941a0, 0xff994238 } -Shifting token 'a' (0xff9941a0 'a') -0x57a873f4->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff9941a0 } -0xff99412f->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff9941a0 } -0xff99412f->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff99412f, 0xff9941a0 } -0xff9941a0->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff9941a0 } -Entering state 2 -Stack now 0 11 11 11 2 -0xff994248->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57a873f4 'a') --> $$ = nterm item (0xff994248 'a') -0x57a873f4->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff994248 } -0x57a873f4->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff994248 } -0xff99418f->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff994248 } -0xff99418f->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff99418f, 0xff994248 } -0xff994248->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff994248 } -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0xff99418f->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4 } -0xff994238->Object::Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff99418f } -0xff99418f->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff99418f, 0xff994238 } -Next token is token 'p' (0xff994238 'p'Exception caught: cleaning lookahead and stack -0x57a873f4->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0x57a873f4, 0xff994238 } -0x57a873e4->Object::~Object { 0x57a873c4, 0x57a873d4, 0x57a873e4, 0xff994238 } -0x57a873d4->Object::~Object { 0x57a873c4, 0x57a873d4, 0xff994238 } -0x57a873c4->Object::~Object { 0x57a873c4, 0xff994238 } -0xff994238->Object::~Object { 0xff994238 } -exception caught: printer -end { } -./c++.at:1362: grep '^exception caught: printer$' stderr -stderr: -stdout: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x57984a00->Object::Object { } -Next token is token 'a' (0x57984a00 'a') -Shifting token 'a' (0x57984a00 'a') +0x57660a00->Object::Object { } +Next token is token 'a' (0x57660a00 'a') +Shifting token 'a' (0x57660a00 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57984a00 'a') --> $$ = nterm item (0x57984a00 'a') + $1 = token 'a' (0x57660a00 'a') +-> $$ = nterm item (0x57660a00 'a') Entering state 11 Stack now 0 11 Reading a token -0x57984a30->Object::Object { 0x57984a00 } -Next token is token 'a' (0x57984a30 'a') -Shifting token 'a' (0x57984a30 'a') +0x57660a30->Object::Object { 0x57660a00 } +Next token is token 'a' (0x57660a30 'a') +Shifting token 'a' (0x57660a30 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57984a30 'a') --> $$ = nterm item (0x57984a30 'a') + $1 = token 'a' (0x57660a30 'a') +-> $$ = nterm item (0x57660a30 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x57984a60->Object::Object { 0x57984a00, 0x57984a30 } -Next token is token 'a' (0x57984a60 'a') -Shifting token 'a' (0x57984a60 'a') +0x57660a60->Object::Object { 0x57660a00, 0x57660a30 } +Next token is token 'a' (0x57660a60 'a') +Shifting token 'a' (0x57660a60 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 
147): - $1 = token 'a' (0x57984a60 'a') --> $$ = nterm item (0x57984a60 'a') + $1 = token 'a' (0x57660a60 'a') +-> $$ = nterm item (0x57660a60 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x57984a90->Object::Object { 0x57984a00, 0x57984a30, 0x57984a60 } -Next token is token 'a' (0x57984a90 'a') -Shifting token 'a' (0x57984a90 'a') +0x57660a90->Object::Object { 0x57660a00, 0x57660a30, 0x57660a60 } +Next token is token 'a' (0x57660a90 'a') +Shifting token 'a' (0x57660a90 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57984a90 'a') --> $$ = nterm item (0x57984a90 'a') + $1 = token 'a' (0x57660a90 'a') +-> $$ = nterm item (0x57660a90 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x57984ac0->Object::Object { 0x57984a00, 0x57984a30, 0x57984a60, 0x57984a90 } -Next token is token 'p' (0x57984ac0 'p'Exception caught: cleaning lookahead and stack -0x57984ac0->Object::~Object { 0x57984a00, 0x57984a30, 0x57984a60, 0x57984a90, 0x57984ac0 } -0x57984a90->Object::~Object { 0x57984a00, 0x57984a30, 0x57984a60, 0x57984a90 } -0x57984a60->Object::~Object { 0x57984a00, 0x57984a30, 0x57984a60 } -0x57984a30->Object::~Object { 0x57984a00, 0x57984a30 } -0x57984a00->Object::~Object { 0x57984a00 } +0x57660ac0->Object::Object { 0x57660a00, 0x57660a30, 0x57660a60, 0x57660a90 } +Next token is token 'p' (0x57660ac0 'p'Exception caught: cleaning lookahead and stack +0x57660ac0->Object::~Object { 0x57660a00, 0x57660a30, 0x57660a60, 0x57660a90, 0x57660ac0 } +0x57660a90->Object::~Object { 0x57660a00, 0x57660a30, 0x57660a60, 0x57660a90 } +0x57660a60->Object::~Object { 0x57660a00, 0x57660a30, 0x57660a60 } +0x57660a30->Object::~Object { 0x57660a00, 0x57660a30 } +0x57660a00->Object::~Object { 0x57660a00 } exception caught: printer end { } -exception caught: printer -./c++.at:1362: $PREPARSER ./input aaaae ./c++.at:1360: grep '^exception caught: printer$' stderr stdout: exception caught: printer -stderr: -exception caught: syntax error ./c++.at:1360: $PREPARSER ./input aaaae -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:1362: $PREPARSER ./input aaaaE exception caught: syntax error ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input aaaaE stderr: exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1362: $PREPARSER ./input aaaaT -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaR -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:1360: $PREPARSER ./input aaaaT stderr: +stderr: +stdout: +./c++.at:567: $here/modern +stdout: +Modern C++: 201703 +./c++.at:567: $PREPARSER ./list ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input aaaaR stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -stdout: 
-./c++.at:1501: $CXX $CPPFLAGS $CXXFLAGS -Iinclude $LDFLAGS -o parser x[12].o main.cc $LIBS stderr: stdout: -./c++.at:858: $PREPARSER ./input +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:568: $here/modern +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +Modern C++: 201703 +./c++.at:568: $PREPARSER ./list stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:1363: $PREPARSER ./input aaaas +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: +./c++.at:570: $here/modern +stdout: +Modern C++: 201703 +./c++.at:570: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1363: $PREPARSER ./input aaaal -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -exception caught: yylex -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input i +stdout: +./c++.at:235: $PREPARSER ./list +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -exception caught: initial-action -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaap +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS 
$LDFLAGS -o list list.cc $LIBS +664. c++.at:107: ok stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input --debug aaaap +stdout: +./c++.at:850: $PREPARSER ./input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xffc0746f->Object::Object { } -0xffc07518->Object::Object { 0xffc0746f } -0xffc0746f->Object::~Object { 0xffc0746f, 0xffc07518 } -Next token is token 'a' (0xffc07518 'a') -0xffc07468->Object::Object { 0xffc07518 } -0xffc07518->Object::~Object { 0xffc07468, 0xffc07518 } -Shifting token 'a' (0xffc07468 'a') -0x57cad3c4->Object::Object { 0xffc07468 } -0xffc07468->Object::~Object { 0x57cad3c4, 0xffc07468 } -Entering state 1 -Stack now 0 1 -0xffc07528->Object::Object { 0x57cad3c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57cad3c4 'a') --> $$ = nterm item (0xffc07528 'a') -0x57cad3c4->Object::~Object { 0x57cad3c4, 0xffc07528 } -0x57cad3c4->Object::Object { 0xffc07528 } -0xffc07528->Object::~Object { 0x57cad3c4, 0xffc07528 } -Entering state 10 -Stack now 0 10 -Reading a token -0xffc0746f->Object::Object { 0x57cad3c4 } -0xffc07518->Object::Object { 0x57cad3c4, 0xffc0746f } -0xffc0746f->Object::~Object { 0x57cad3c4, 0xffc0746f, 0xffc07518 } -Next token is token 'a' (0xffc07518 'a') -0xffc07468->Object::Object { 0x57cad3c4, 0xffc07518 } -0xffc07518->Object::~Object { 0x57cad3c4, 0xffc07468, 0xffc07518 } -Shifting token 'a' (0xffc07468 'a') -0x57cad3d4->Object::Object { 0x57cad3c4, 0xffc07468 } -0xffc07468->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0xffc07468 } -Entering state 1 -Stack now 0 10 1 -0xffc07528->Object::Object { 0x57cad3c4, 0x57cad3d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57cad3d4 'a') --> $$ = nterm item (0xffc07528 'a') -0x57cad3d4->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0xffc07528 } -0x57cad3d4->Object::Object { 0x57cad3c4, 0xffc07528 } -0xffc07528->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0xffc07528 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0xffc0746f->Object::Object { 0x57cad3c4, 0x57cad3d4 } -0xffc07518->Object::Object { 0x57cad3c4, 0x57cad3d4, 0xffc0746f } -0xffc0746f->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0xffc0746f, 0xffc07518 } -Next token is token 'a' (0xffc07518 'a') -0xffc07468->Object::Object { 0x57cad3c4, 0x57cad3d4, 0xffc07518 } -0xffc07518->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0xffc07468, 0xffc07518 } -Shifting token 'a' (0xffc07468 'a') -0x57cad3e4->Object::Object { 0x57cad3c4, 0x57cad3d4, 0xffc07468 } -0xffc07468->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0xffc07468 } -Entering state 1 -Stack now 0 10 10 1 -0xffc07528->Object::Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57cad3e4 'a') --> $$ = nterm item (0xffc07528 'a') -0x57cad3e4->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0xffc07528 } -0x57cad3e4->Object::Object { 0x57cad3c4, 0x57cad3d4, 0xffc07528 } -0xffc07528->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0xffc07528 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0xffc0746f->Object::Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4 } -0xffc07518->Object::Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0xffc0746f } -0xffc0746f->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0xffc0746f, 0xffc07518 } -Next token is token 'a' (0xffc07518 'a') -0xffc07468->Object::Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0xffc07518 } -0xffc07518->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 
0xffc07468, 0xffc07518 } -Shifting token 'a' (0xffc07468 'a') -0x57cad3f4->Object::Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0xffc07468 } -0xffc07468->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0x57cad3f4, 0xffc07468 } -Entering state 1 -Stack now 0 10 10 10 1 -0xffc07528->Object::Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0x57cad3f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57cad3f4 'a') --> $$ = nterm item (0xffc07528 'a') -0x57cad3f4->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0x57cad3f4, 0xffc07528 } -0x57cad3f4->Object::Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0xffc07528 } -0xffc07528->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0x57cad3f4, 0xffc07528 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0xffc0746f->Object::Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0x57cad3f4 } -0xffc07518->Object::Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0x57cad3f4, 0xffc0746f } -0xffc0746f->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0x57cad3f4, 0xffc0746f, 0xffc07518 } -Next token is token 'p' (0xffc07518 'p'Exception caught: cleaning lookahead and stack -0x57cad3f4->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0x57cad3f4, 0xffc07518 } -0x57cad3e4->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0xffc07518 } -0x57cad3d4->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0xffc07518 } -0x57cad3c4->Object::~Object { 0x57cad3c4, 0xffc07518 } -0xffc07518->Object::~Object { 0xffc07518 } -exception caught: printer -end { } -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS + +690. c++.at:1422: testing Shared locations ... 
+./c++.at:1456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o x1.cc x1.yy stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xffc0746f->Object::Object { } -0xffc07518->Object::Object { 0xffc0746f } -0xffc0746f->Object::~Object { 0xffc0746f, 0xffc07518 } -Next token is token 'a' (0xffc07518 'a') -0xffc07468->Object::Object { 0xffc07518 } -0xffc07518->Object::~Object { 0xffc07468, 0xffc07518 } -Shifting token 'a' (0xffc07468 'a') -0x57cad3c4->Object::Object { 0xffc07468 } -0xffc07468->Object::~Object { 0x57cad3c4, 0xffc07468 } -Entering state 1 -Stack now 0 1 -0xffc07528->Object::Object { 0x57cad3c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57cad3c4 'a') --> $$ = nterm item (0xffc07528 'a') -0x57cad3c4->Object::~Object { 0x57cad3c4, 0xffc07528 } -0x57cad3c4->Object::Object { 0xffc07528 } -0xffc07528->Object::~Object { 0x57cad3c4, 0xffc07528 } -Entering state 10 -Stack now 0 10 -Reading a token -0xffc0746f->Object::Object { 0x57cad3c4 } -0xffc07518->Object::Object { 0x57cad3c4, 0xffc0746f } -0xffc0746f->Object::~Object { 0x57cad3c4, 0xffc0746f, 0xffc07518 } -Next token is token 'a' (0xffc07518 'a') -0xffc07468->Object::Object { 0x57cad3c4, 0xffc07518 } -0xffc07518->Object::~Object { 0x57cad3c4, 0xffc07468, 0xffc07518 } -Shifting token 'a' (0xffc07468 'a') -0x57cad3d4->Object::Object { 0x57cad3c4, 0xffc07468 } -0xffc07468->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0xffc07468 } -Entering state 1 -Stack now 0 10 1 -0xffc07528->Object::Object { 0x57cad3c4, 0x57cad3d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57cad3d4 'a') --> $$ = nterm item (0xffc07528 'a') -0x57cad3d4->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0xffc07528 } -0x57cad3d4->Object::Object { 0x57cad3c4, 0xffc07528 } -0xffc07528->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0xffc07528 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0xffc0746f->Object::Object { 0x57cad3c4, 0x57cad3d4 } -0xffc07518->Object::Object { 0x57cad3c4, 0x57cad3d4, 0xffc0746f } -0xffc0746f->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0xffc0746f, 0xffc07518 } -Next token is token 'a' (0xffc07518 'a') -0xffc07468->Object::Object { 0x57cad3c4, 0x57cad3d4, 0xffc07518 } -0xffc07518->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0xffc07468, 0xffc07518 } -Shifting token 'a' (0xffc07468 'a') -0x57cad3e4->Object::Object { 0x57cad3c4, 0x57cad3d4, 0xffc07468 } -0xffc07468->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0xffc07468 } -Entering state 1 -Stack now 0 10 10 1 -0xffc07528->Object::Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57cad3e4 'a') --> $$ = nterm item (0xffc07528 'a') -0x57cad3e4->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0xffc07528 } -0x57cad3e4->Object::Object { 0x57cad3c4, 0x57cad3d4, 0xffc07528 } -0xffc07528->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0xffc07528 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0xffc0746f->Object::Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4 } -0xffc07518->Object::Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0xffc0746f } -0xffc0746f->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0xffc0746f, 0xffc07518 } -Next token is token 'a' (0xffc07518 'a') -0xffc07468->Object::Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0xffc07518 } -0xffc07518->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0xffc07468, 0xffc07518 } -Shifting token 'a' (0xffc07468 'a') -0x57cad3f4->Object::Object { 
0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0xffc07468 } -0xffc07468->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0x57cad3f4, 0xffc07468 } -Entering state 1 -Stack now 0 10 10 10 1 -0xffc07528->Object::Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0x57cad3f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57cad3f4 'a') --> $$ = nterm item (0xffc07528 'a') -0x57cad3f4->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0x57cad3f4, 0xffc07528 } -0x57cad3f4->Object::Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0xffc07528 } -0xffc07528->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0x57cad3f4, 0xffc07528 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0xffc0746f->Object::Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0x57cad3f4 } -0xffc07518->Object::Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0x57cad3f4, 0xffc0746f } -0xffc0746f->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0x57cad3f4, 0xffc0746f, 0xffc07518 } -Next token is token 'p' (0xffc07518 'p'Exception caught: cleaning lookahead and stack -0x57cad3f4->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0x57cad3f4, 0xffc07518 } -0x57cad3e4->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0x57cad3e4, 0xffc07518 } -0x57cad3d4->Object::~Object { 0x57cad3c4, 0x57cad3d4, 0xffc07518 } -0x57cad3c4->Object::~Object { 0x57cad3c4, 0xffc07518 } -0xffc07518->Object::~Object { 0xffc07518 } -exception caught: printer -end { } -./c++.at:1363: grep '^exception caught: printer$' stderr stdout: -exception caught: printer -./c++.at:1363: $PREPARSER ./input aaaae -stderr: -exception caught: syntax error -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaE +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1456: $CXX $CPPFLAGS $CXXFLAGS -Iinclude -c -o x1.o x1.cc stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaT +stdout: +./c++.at:856: $PREPARSER ./input stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaR stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none stderr: stdout: -./c++.at:1502: $PREPARSER ./parser -stderr: -./c++.at:1502: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -690. c++.at:1422: ok - -691. c++.at:1517: testing Default action ... 
======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:1555: ./check -./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: +./c++.at:572: $here/modern stdout: -./c++.at:858: $PREPARSER ./input +Modern C++: 201703 +./c++.at:572: $PREPARSER ./list stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:1361: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaap -stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input --debug aaaap +./c++.at:571: $here/modern +stdout: +Modern C++: 201703 +./c++.at:571: $PREPARSER ./list stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x5701da00->Object::Object { } -Next token is token 'a' (0x5701da00 'a') -Shifting token 'a' (0x5701da00 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5701da00 'a') --> $$ = nterm item (0x5701da00 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x5701da30->Object::Object { 0x5701da00 } -Next token is token 'a' (0x5701da30 'a') -Shifting token 'a' (0x5701da30 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5701da30 'a') --> $$ = nterm item (0x5701da30 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x5701da60->Object::Object { 0x5701da00, 0x5701da30 } -Next token is token 'a' (0x5701da60 'a') -Shifting token 'a' (0x5701da60 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5701da60 'a') --> $$ = nterm item (0x5701da60 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x5701da90->Object::Object { 0x5701da00, 0x5701da30, 0x5701da60 } -Next token is token 'a' (0x5701da90 'a') -Shifting token 'a' (0x5701da90 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5701da90 'a') --> $$ = nterm item (0x5701da90 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x5701dac0->Object::Object { 
0x5701da00, 0x5701da30, 0x5701da60, 0x5701da90 } -Next token is token 'p' (0x5701dac0 'p'Exception caught: cleaning lookahead and stack -0x5701dac0->Object::~Object { 0x5701da00, 0x5701da30, 0x5701da60, 0x5701da90, 0x5701dac0 } -0x5701da90->Object::~Object { 0x5701da00, 0x5701da30, 0x5701da60, 0x5701da90 } -0x5701da60->Object::~Object { 0x5701da00, 0x5701da30, 0x5701da60 } -0x5701da30->Object::~Object { 0x5701da00, 0x5701da30 } -0x5701da00->Object::~Object { 0x5701da00 } -exception caught: printer -end { } -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x5701da00->Object::Object { } -Next token is token 'a' (0x5701da00 'a') -Shifting token 'a' (0x5701da00 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5701da00 'a') --> $$ = nterm item (0x5701da00 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x5701da30->Object::Object { 0x5701da00 } -Next token is token 'a' (0x5701da30 'a') -Shifting token 'a' (0x5701da30 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5701da30 'a') --> $$ = nterm item (0x5701da30 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x5701da60->Object::Object { 0x5701da00, 0x5701da30 } -Next token is token 'a' (0x5701da60 'a') -Shifting token 'a' (0x5701da60 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5701da60 'a') --> $$ = nterm item (0x5701da60 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x5701da90->Object::Object { 0x5701da00, 0x5701da30, 0x5701da60 } -Next token is token 'a' (0x5701da90 'a') -Shifting token 'a' (0x5701da90 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5701da90 'a') --> $$ = nterm item (0x5701da90 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x5701dac0->Object::Object { 0x5701da00, 0x5701da30, 0x5701da60, 0x5701da90 } -Next token is token 'p' (0x5701dac0 'p'Exception caught: cleaning lookahead and stack -0x5701dac0->Object::~Object { 0x5701da00, 0x5701da30, 0x5701da60, 0x5701da90, 0x5701dac0 } -0x5701da90->Object::~Object { 0x5701da00, 0x5701da30, 0x5701da60, 0x5701da90 } -0x5701da60->Object::~Object { 0x5701da00, 0x5701da30, 0x5701da60 } -0x5701da30->Object::~Object { 0x5701da00, 0x5701da30 } -0x5701da00->Object::~Object { 0x5701da00 } -exception caught: printer -end { } -./c++.at:1361: grep '^exception caught: printer$' stderr stdout: -exception caught: printer -./c++.at:1361: $PREPARSER ./input aaaae +./c++.at:569: $here/modern +stdout: +Modern C++: 201703 +./c++.at:569: $PREPARSER ./list stderr: -exception caught: syntax error 
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaE +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaT +stdout: +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaR +stdout: +./c++.at:566: $here/modern +stdout: +Modern C++: 202002 +./c++.at:566: $PREPARSER ./list stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ======== Testing with C++ standard flags: '' -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: ./c++.at:1362: $PREPARSER ./input aaaas stderr: exception caught: reduction ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS ./c++.at:1362: $PREPARSER ./input aaaal stderr: exception caught: yylex @@ -255701,99 +253790,123 @@ Entering state 0 Stack now 0 Reading a token -0xffe0e40f->Object::Object { } -0xffe0e4b8->Object::Object { 0xffe0e40f } -0xffe0e40f->Object::~Object { 0xffe0e40f, 0xffe0e4b8 } -Next token is token 'a' (0xffe0e4b8 'a') -0xffe0e408->Object::Object { 0xffe0e4b8 } -0xffe0e4b8->Object::~Object { 0xffe0e408, 0xffe0e4b8 } -Shifting token 'a' (0xffe0e408 'a') -0x56f9c3c4->Object::Object { 0xffe0e408 } -0xffe0e408->Object::~Object { 0x56f9c3c4, 0xffe0e408 } +0xffe6f42f->Object::Object { } +0xffe6f4d8->Object::Object { 0xffe6f42f } +0xffe6f42f->Object::~Object { 0xffe6f42f, 0xffe6f4d8 } +Next token is token 'a' (0xffe6f4d8 'a') +0xffe6f440->Object::Object { 0xffe6f4d8 } +0xffe6f3cb->Object::Object { 0xffe6f440, 0xffe6f4d8 } +0xffe6f3cb->Object::~Object { 0xffe6f3cb, 0xffe6f440, 0xffe6f4d8 } +0xffe6f4d8->Object::~Object { 0xffe6f440, 0xffe6f4d8 } +Shifting token 'a' (0xffe6f440 'a') 
+0x576213c4->Object::Object { 0xffe6f440 } +0xffe6f3cf->Object::Object { 0x576213c4, 0xffe6f440 } +0xffe6f3cf->Object::~Object { 0x576213c4, 0xffe6f3cf, 0xffe6f440 } +0xffe6f440->Object::~Object { 0x576213c4, 0xffe6f440 } Entering state 2 Stack now 0 2 -0xffe0e4c8->Object::Object { 0x56f9c3c4 } +0xffe6f4e8->Object::Object { 0x576213c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56f9c3c4 'a') --> $$ = nterm item (0xffe0e4c8 'a') -0x56f9c3c4->Object::~Object { 0x56f9c3c4, 0xffe0e4c8 } -0x56f9c3c4->Object::Object { 0xffe0e4c8 } -0xffe0e4c8->Object::~Object { 0x56f9c3c4, 0xffe0e4c8 } + $1 = token 'a' (0x576213c4 'a') +-> $$ = nterm item (0xffe6f4e8 'a') +0x576213c4->Object::~Object { 0x576213c4, 0xffe6f4e8 } +0x576213c4->Object::Object { 0xffe6f4e8 } +0xffe6f42f->Object::Object { 0x576213c4, 0xffe6f4e8 } +0xffe6f42f->Object::~Object { 0x576213c4, 0xffe6f42f, 0xffe6f4e8 } +0xffe6f4e8->Object::~Object { 0x576213c4, 0xffe6f4e8 } Entering state 11 Stack now 0 11 Reading a token -0xffe0e40f->Object::Object { 0x56f9c3c4 } -0xffe0e4b8->Object::Object { 0x56f9c3c4, 0xffe0e40f } -0xffe0e40f->Object::~Object { 0x56f9c3c4, 0xffe0e40f, 0xffe0e4b8 } -Next token is token 'a' (0xffe0e4b8 'a') -0xffe0e408->Object::Object { 0x56f9c3c4, 0xffe0e4b8 } -0xffe0e4b8->Object::~Object { 0x56f9c3c4, 0xffe0e408, 0xffe0e4b8 } -Shifting token 'a' (0xffe0e408 'a') -0x56f9c3d4->Object::Object { 0x56f9c3c4, 0xffe0e408 } -0xffe0e408->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0xffe0e408 } +0xffe6f42f->Object::Object { 0x576213c4 } +0xffe6f4d8->Object::Object { 0x576213c4, 0xffe6f42f } +0xffe6f42f->Object::~Object { 0x576213c4, 0xffe6f42f, 0xffe6f4d8 } +Next token is token 'a' (0xffe6f4d8 'a') +0xffe6f440->Object::Object { 0x576213c4, 0xffe6f4d8 } +0xffe6f3cb->Object::Object { 0x576213c4, 0xffe6f440, 0xffe6f4d8 } +0xffe6f3cb->Object::~Object { 0x576213c4, 0xffe6f3cb, 0xffe6f440, 0xffe6f4d8 } +0xffe6f4d8->Object::~Object { 0x576213c4, 0xffe6f440, 0xffe6f4d8 } +Shifting token 'a' (0xffe6f440 'a') +0x576213d4->Object::Object { 0x576213c4, 0xffe6f440 } +0xffe6f3cf->Object::Object { 0x576213c4, 0x576213d4, 0xffe6f440 } +0xffe6f3cf->Object::~Object { 0x576213c4, 0x576213d4, 0xffe6f3cf, 0xffe6f440 } +0xffe6f440->Object::~Object { 0x576213c4, 0x576213d4, 0xffe6f440 } Entering state 2 Stack now 0 11 2 -0xffe0e4c8->Object::Object { 0x56f9c3c4, 0x56f9c3d4 } +0xffe6f4e8->Object::Object { 0x576213c4, 0x576213d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56f9c3d4 'a') --> $$ = nterm item (0xffe0e4c8 'a') -0x56f9c3d4->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0xffe0e4c8 } -0x56f9c3d4->Object::Object { 0x56f9c3c4, 0xffe0e4c8 } -0xffe0e4c8->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0xffe0e4c8 } + $1 = token 'a' (0x576213d4 'a') +-> $$ = nterm item (0xffe6f4e8 'a') +0x576213d4->Object::~Object { 0x576213c4, 0x576213d4, 0xffe6f4e8 } +0x576213d4->Object::Object { 0x576213c4, 0xffe6f4e8 } +0xffe6f42f->Object::Object { 0x576213c4, 0x576213d4, 0xffe6f4e8 } +0xffe6f42f->Object::~Object { 0x576213c4, 0x576213d4, 0xffe6f42f, 0xffe6f4e8 } +0xffe6f4e8->Object::~Object { 0x576213c4, 0x576213d4, 0xffe6f4e8 } Entering state 11 Stack now 0 11 11 Reading a token -0xffe0e40f->Object::Object { 0x56f9c3c4, 0x56f9c3d4 } -0xffe0e4b8->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0xffe0e40f } -0xffe0e40f->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0xffe0e40f, 0xffe0e4b8 } -Next token is token 'a' (0xffe0e4b8 'a') -0xffe0e408->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0xffe0e4b8 } -0xffe0e4b8->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 
0xffe0e408, 0xffe0e4b8 } -Shifting token 'a' (0xffe0e408 'a') -0x56f9c3e4->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0xffe0e408 } -0xffe0e408->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e408 } +0xffe6f42f->Object::Object { 0x576213c4, 0x576213d4 } +0xffe6f4d8->Object::Object { 0x576213c4, 0x576213d4, 0xffe6f42f } +0xffe6f42f->Object::~Object { 0x576213c4, 0x576213d4, 0xffe6f42f, 0xffe6f4d8 } +Next token is token 'a' (0xffe6f4d8 'a') +0xffe6f440->Object::Object { 0x576213c4, 0x576213d4, 0xffe6f4d8 } +0xffe6f3cb->Object::Object { 0x576213c4, 0x576213d4, 0xffe6f440, 0xffe6f4d8 } +0xffe6f3cb->Object::~Object { 0x576213c4, 0x576213d4, 0xffe6f3cb, 0xffe6f440, 0xffe6f4d8 } +0xffe6f4d8->Object::~Object { 0x576213c4, 0x576213d4, 0xffe6f440, 0xffe6f4d8 } +Shifting token 'a' (0xffe6f440 'a') +0x576213e4->Object::Object { 0x576213c4, 0x576213d4, 0xffe6f440 } +0xffe6f3cf->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f440 } +0xffe6f3cf->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f3cf, 0xffe6f440 } +0xffe6f440->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f440 } Entering state 2 Stack now 0 11 11 2 -0xffe0e4c8->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4 } +0xffe6f4e8->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56f9c3e4 'a') --> $$ = nterm item (0xffe0e4c8 'a') -0x56f9c3e4->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e4c8 } -0x56f9c3e4->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0xffe0e4c8 } -0xffe0e4c8->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e4c8 } + $1 = token 'a' (0x576213e4 'a') +-> $$ = nterm item (0xffe6f4e8 'a') +0x576213e4->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f4e8 } +0x576213e4->Object::Object { 0x576213c4, 0x576213d4, 0xffe6f4e8 } +0xffe6f42f->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f4e8 } +0xffe6f42f->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f42f, 0xffe6f4e8 } +0xffe6f4e8->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f4e8 } Entering state 11 Stack now 0 11 11 11 Reading a token -0xffe0e40f->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4 } -0xffe0e4b8->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e40f } -0xffe0e40f->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e40f, 0xffe0e4b8 } -Next token is token 'a' (0xffe0e4b8 'a') -0xffe0e408->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e4b8 } -0xffe0e4b8->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e408, 0xffe0e4b8 } -Shifting token 'a' (0xffe0e408 'a') -0x56f9c3f4->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e408 } -0xffe0e408->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0x56f9c3f4, 0xffe0e408 } +0xffe6f42f->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4 } +0xffe6f4d8->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f42f } +0xffe6f42f->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f42f, 0xffe6f4d8 } +Next token is token 'a' (0xffe6f4d8 'a') +0xffe6f440->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f4d8 } +0xffe6f3cb->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f440, 0xffe6f4d8 } +0xffe6f3cb->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f3cb, 0xffe6f440, 0xffe6f4d8 } +0xffe6f4d8->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f440, 0xffe6f4d8 } +Shifting token 'a' (0xffe6f440 'a') +0x576213f4->Object::Object { 0x576213c4, 0x576213d4, 
0x576213e4, 0xffe6f440 } +0xffe6f3cf->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f440 } +0xffe6f3cf->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f3cf, 0xffe6f440 } +0xffe6f440->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f440 } Entering state 2 Stack now 0 11 11 11 2 -0xffe0e4c8->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0x56f9c3f4 } +0xffe6f4e8->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56f9c3f4 'a') --> $$ = nterm item (0xffe0e4c8 'a') -0x56f9c3f4->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0x56f9c3f4, 0xffe0e4c8 } -0x56f9c3f4->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e4c8 } -0xffe0e4c8->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0x56f9c3f4, 0xffe0e4c8 } + $1 = token 'a' (0x576213f4 'a') +-> $$ = nterm item (0xffe6f4e8 'a') +0x576213f4->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f4e8 } +0x576213f4->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f4e8 } +0xffe6f42f->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f4e8 } +0xffe6f42f->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f42f, 0xffe6f4e8 } +0xffe6f4e8->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f4e8 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0xffe0e40f->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0x56f9c3f4 } -0xffe0e4b8->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0x56f9c3f4, 0xffe0e40f } -0xffe0e40f->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0x56f9c3f4, 0xffe0e40f, 0xffe0e4b8 } -Next token is token 'p' (0xffe0e4b8 'p'Exception caught: cleaning lookahead and stack -0x56f9c3f4->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0x56f9c3f4, 0xffe0e4b8 } -0x56f9c3e4->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e4b8 } -0x56f9c3d4->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0xffe0e4b8 } -0x56f9c3c4->Object::~Object { 0x56f9c3c4, 0xffe0e4b8 } -0xffe0e4b8->Object::~Object { 0xffe0e4b8 } +0xffe6f42f->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4 } +0xffe6f4d8->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f42f } +0xffe6f42f->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f42f, 0xffe6f4d8 } +Next token is token 'p' (0xffe6f4d8 'p'Exception caught: cleaning lookahead and stack +0x576213f4->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f4d8 } +0x576213e4->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f4d8 } +0x576213d4->Object::~Object { 0x576213c4, 0x576213d4, 0xffe6f4d8 } +0x576213c4->Object::~Object { 0x576213c4, 0xffe6f4d8 } +0xffe6f4d8->Object::~Object { 0xffe6f4d8 } exception caught: printer end { } ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -255802,1054 +253915,731 @@ Entering state 0 Stack now 0 Reading a token -0xffe0e40f->Object::Object { } -0xffe0e4b8->Object::Object { 0xffe0e40f } -0xffe0e40f->Object::~Object { 0xffe0e40f, 0xffe0e4b8 } -Next token is token 'a' (0xffe0e4b8 'a') -0xffe0e408->Object::Object { 0xffe0e4b8 } -0xffe0e4b8->Object::~Object { 0xffe0e408, 0xffe0e4b8 } -Shifting token 'a' (0xffe0e408 'a') -0x56f9c3c4->Object::Object { 0xffe0e408 } -0xffe0e408->Object::~Object { 0x56f9c3c4, 0xffe0e408 } +0xffe6f42f->Object::Object { } +0xffe6f4d8->Object::Object { 0xffe6f42f } 
+0xffe6f42f->Object::~Object { 0xffe6f42f, 0xffe6f4d8 } +Next token is token 'a' (0xffe6f4d8 'a') +0xffe6f440->Object::Object { 0xffe6f4d8 } +0xffe6f3cb->Object::Object { 0xffe6f440, 0xffe6f4d8 } +0xffe6f3cb->Object::~Object { 0xffe6f3cb, 0xffe6f440, 0xffe6f4d8 } +0xffe6f4d8->Object::~Object { 0xffe6f440, 0xffe6f4d8 } +Shifting token 'a' (0xffe6f440 'a') +0x576213c4->Object::Object { 0xffe6f440 } +0xffe6f3cf->Object::Object { 0x576213c4, 0xffe6f440 } +0xffe6f3cf->Object::~Object { 0x576213c4, 0xffe6f3cf, 0xffe6f440 } +0xffe6f440->Object::~Object { 0x576213c4, 0xffe6f440 } Entering state 2 Stack now 0 2 -0xffe0e4c8->Object::Object { 0x56f9c3c4 } +0xffe6f4e8->Object::Object { 0x576213c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56f9c3c4 'a') --> $$ = nterm item (0xffe0e4c8 'a') -0x56f9c3c4->Object::~Object { 0x56f9c3c4, 0xffe0e4c8 } -0x56f9c3c4->Object::Object { 0xffe0e4c8 } -0xffe0e4c8->Object::~Object { 0x56f9c3c4, 0xffe0e4c8 } + $1 = token 'a' (0x576213c4 'a') +-> $$ = nterm item (0xffe6f4e8 'a') +0x576213c4->Object::~Object { 0x576213c4, 0xffe6f4e8 } +0x576213c4->Object::Object { 0xffe6f4e8 } +0xffe6f42f->Object::Object { 0x576213c4, 0xffe6f4e8 } +0xffe6f42f->Object::~Object { 0x576213c4, 0xffe6f42f, 0xffe6f4e8 } +0xffe6f4e8->Object::~Object { 0x576213c4, 0xffe6f4e8 } Entering state 11 Stack now 0 11 Reading a token -0xffe0e40f->Object::Object { 0x56f9c3c4 } -0xffe0e4b8->Object::Object { 0x56f9c3c4, 0xffe0e40f } -0xffe0e40f->Object::~Object { 0x56f9c3c4, 0xffe0e40f, 0xffe0e4b8 } -Next token is token 'a' (0xffe0e4b8 'a') -0xffe0e408->Object::Object { 0x56f9c3c4, 0xffe0e4b8 } -0xffe0e4b8->Object::~Object { 0x56f9c3c4, 0xffe0e408, 0xffe0e4b8 } -Shifting token 'a' (0xffe0e408 'a') -0x56f9c3d4->Object::Object { 0x56f9c3c4, 0xffe0e408 } -0xffe0e408->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0xffe0e408 } +0xffe6f42f->Object::Object { 0x576213c4 } +0xffe6f4d8->Object::Object { 0x576213c4, 0xffe6f42f } +0xffe6f42f->Object::~Object { 0x576213c4, 0xffe6f42f, 0xffe6f4d8 } +Next token is token 'a' (0xffe6f4d8 'a') +0xffe6f440->Object::Object { 0x576213c4, 0xffe6f4d8 } +0xffe6f3cb->Object::Object { 0x576213c4, 0xffe6f440, 0xffe6f4d8 } +0xffe6f3cb->Object::~Object { 0x576213c4, 0xffe6f3cb, 0xffe6f440, 0xffe6f4d8 } +0xffe6f4d8->Object::~Object { 0x576213c4, 0xffe6f440, 0xffe6f4d8 } +Shifting token 'a' (0xffe6f440 'a') +0x576213d4->Object::Object { 0x576213c4, 0xffe6f440 } +0xffe6f3cf->Object::Object { 0x576213c4, 0x576213d4, 0xffe6f440 } +0xffe6f3cf->Object::~Object { 0x576213c4, 0x576213d4, 0xffe6f3cf, 0xffe6f440 } +0xffe6f440->Object::~Object { 0x576213c4, 0x576213d4, 0xffe6f440 } Entering state 2 Stack now 0 11 2 -0xffe0e4c8->Object::Object { 0x56f9c3c4, 0x56f9c3d4 } +0xffe6f4e8->Object::Object { 0x576213c4, 0x576213d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56f9c3d4 'a') --> $$ = nterm item (0xffe0e4c8 'a') -0x56f9c3d4->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0xffe0e4c8 } -0x56f9c3d4->Object::Object { 0x56f9c3c4, 0xffe0e4c8 } -0xffe0e4c8->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0xffe0e4c8 } + $1 = token 'a' (0x576213d4 'a') +-> $$ = nterm item (0xffe6f4e8 'a') +0x576213d4->Object::~Object { 0x576213c4, 0x576213d4, 0xffe6f4e8 } +0x576213d4->Object::Object { 0x576213c4, 0xffe6f4e8 } +0xffe6f42f->Object::Object { 0x576213c4, 0x576213d4, 0xffe6f4e8 } +0xffe6f42f->Object::~Object { 0x576213c4, 0x576213d4, 0xffe6f42f, 0xffe6f4e8 } +0xffe6f4e8->Object::~Object { 0x576213c4, 0x576213d4, 0xffe6f4e8 } Entering state 11 Stack now 0 11 11 Reading a token 
-0xffe0e40f->Object::Object { 0x56f9c3c4, 0x56f9c3d4 } -0xffe0e4b8->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0xffe0e40f } -0xffe0e40f->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0xffe0e40f, 0xffe0e4b8 } -Next token is token 'a' (0xffe0e4b8 'a') -0xffe0e408->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0xffe0e4b8 } -0xffe0e4b8->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0xffe0e408, 0xffe0e4b8 } -Shifting token 'a' (0xffe0e408 'a') -0x56f9c3e4->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0xffe0e408 } -0xffe0e408->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e408 } +0xffe6f42f->Object::Object { 0x576213c4, 0x576213d4 } +0xffe6f4d8->Object::Object { 0x576213c4, 0x576213d4, 0xffe6f42f } +0xffe6f42f->Object::~Object { 0x576213c4, 0x576213d4, 0xffe6f42f, 0xffe6f4d8 } +Next token is token 'a' (0xffe6f4d8 'a') +0xffe6f440->Object::Object { 0x576213c4, 0x576213d4, 0xffe6f4d8 } +0xffe6f3cb->Object::Object { 0x576213c4, 0x576213d4, 0xffe6f440, 0xffe6f4d8 } +0xffe6f3cb->Object::~Object { 0x576213c4, 0x576213d4, 0xffe6f3cb, 0xffe6f440, 0xffe6f4d8 } +0xffe6f4d8->Object::~Object { 0x576213c4, 0x576213d4, 0xffe6f440, 0xffe6f4d8 } +Shifting token 'a' (0xffe6f440 'a') +0x576213e4->Object::Object { 0x576213c4, 0x576213d4, 0xffe6f440 } +0xffe6f3cf->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f440 } +0xffe6f3cf->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f3cf, 0xffe6f440 } +0xffe6f440->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f440 } Entering state 2 Stack now 0 11 11 2 -0xffe0e4c8->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4 } +0xffe6f4e8->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56f9c3e4 'a') --> $$ = nterm item (0xffe0e4c8 'a') -0x56f9c3e4->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e4c8 } -0x56f9c3e4->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0xffe0e4c8 } -0xffe0e4c8->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e4c8 } + $1 = token 'a' (0x576213e4 'a') +-> $$ = nterm item (0xffe6f4e8 'a') +0x576213e4->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f4e8 } +0x576213e4->Object::Object { 0x576213c4, 0x576213d4, 0xffe6f4e8 } +0xffe6f42f->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f4e8 } +0xffe6f42f->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f42f, 0xffe6f4e8 } +0xffe6f4e8->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f4e8 } Entering state 11 Stack now 0 11 11 11 Reading a token -0xffe0e40f->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4 } -0xffe0e4b8->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e40f } -0xffe0e40f->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e40f, 0xffe0e4b8 } -Next token is token 'a' (0xffe0e4b8 'a') -0xffe0e408->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e4b8 } -0xffe0e4b8->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e408, 0xffe0e4b8 } -Shifting token 'a' (0xffe0e408 'a') -0x56f9c3f4->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e408 } -0xffe0e408->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0x56f9c3f4, 0xffe0e408 } +0xffe6f42f->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4 } +0xffe6f4d8->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f42f } +0xffe6f42f->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f42f, 0xffe6f4d8 } +Next token is token 'a' (0xffe6f4d8 'a') +0xffe6f440->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f4d8 } 
+0xffe6f3cb->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f440, 0xffe6f4d8 } +0xffe6f3cb->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f3cb, 0xffe6f440, 0xffe6f4d8 } +0xffe6f4d8->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f440, 0xffe6f4d8 } +Shifting token 'a' (0xffe6f440 'a') +0x576213f4->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f440 } +0xffe6f3cf->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f440 } +0xffe6f3cf->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f3cf, 0xffe6f440 } +0xffe6f440->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f440 } Entering state 2 Stack now 0 11 11 11 2 -0xffe0e4c8->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0x56f9c3f4 } +0xffe6f4e8->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56f9c3f4 'a') --> $$ = nterm item (0xffe0e4c8 'a') -0x56f9c3f4->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0x56f9c3f4, 0xffe0e4c8 } -0x56f9c3f4->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e4c8 } -0xffe0e4c8->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0x56f9c3f4, 0xffe0e4c8 } + $1 = token 'a' (0x576213f4 'a') +-> $$ = nterm item (0xffe6f4e8 'a') +0x576213f4->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f4e8 } +0x576213f4->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f4e8 } +0xffe6f42f->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f4e8 } +0xffe6f42f->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f42f, 0xffe6f4e8 } +0xffe6f4e8->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f4e8 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0xffe0e40f->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0x56f9c3f4 } -0xffe0e4b8->Object::Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0x56f9c3f4, 0xffe0e40f } -0xffe0e40f->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0x56f9c3f4, 0xffe0e40f, 0xffe0e4b8 } -Next token is token 'p' (0xffe0e4b8 'p'Exception caught: cleaning lookahead and stack -0x56f9c3f4->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0x56f9c3f4, 0xffe0e4b8 } -0x56f9c3e4->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0x56f9c3e4, 0xffe0e4b8 } -0x56f9c3d4->Object::~Object { 0x56f9c3c4, 0x56f9c3d4, 0xffe0e4b8 } -0x56f9c3c4->Object::~Object { 0x56f9c3c4, 0xffe0e4b8 } -0xffe0e4b8->Object::~Object { 0xffe0e4b8 } +0xffe6f42f->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4 } +0xffe6f4d8->Object::Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f42f } +0xffe6f42f->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f42f, 0xffe6f4d8 } +Next token is token 'p' (0xffe6f4d8 'p'Exception caught: cleaning lookahead and stack +0x576213f4->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0x576213f4, 0xffe6f4d8 } +0x576213e4->Object::~Object { 0x576213c4, 0x576213d4, 0x576213e4, 0xffe6f4d8 } +0x576213d4->Object::~Object { 0x576213c4, 0x576213d4, 0xffe6f4d8 } +0x576213c4->Object::~Object { 0x576213c4, 0xffe6f4d8 } +0xffe6f4d8->Object::~Object { 0xffe6f4d8 } exception caught: printer end { } ./c++.at:1362: grep '^exception caught: printer$' stderr stdout: -exception caught: printer -./c++.at:1362: $PREPARSER ./input aaaae -stderr: -exception caught: syntax error -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: 
$PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaT -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaR -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:858: $PREPARSER ./input -stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1360: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaap -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input --debug aaaap -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x583f9a00->Object::Object { } -Next token is token 'a' (0x583f9a00 'a') -Shifting token 'a' (0x583f9a00 'a') -Entering state 2 -Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x583f9a00 'a') --> $$ = nterm item (0x583f9a00 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x583f9a30->Object::Object { 0x583f9a00 } -Next token is token 'a' (0x583f9a30 'a') -Shifting token 'a' (0x583f9a30 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x583f9a30 'a') --> $$ = nterm item (0x583f9a30 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x583f9a60->Object::Object { 0x583f9a00, 0x583f9a30 } -Next token is token 'a' (0x583f9a60 'a') -Shifting token 'a' (0x583f9a60 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x583f9a60 'a') --> $$ = nterm item (0x583f9a60 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x583f9a90->Object::Object { 0x583f9a00, 0x583f9a30, 0x583f9a60 } -Next token is token 'a' (0x583f9a90 'a') -Shifting token 'a' (0x583f9a90 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x583f9a90 'a') --> $$ = nterm item (0x583f9a90 'a') -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x583f9ac0->Object::Object { 0x583f9a00, 0x583f9a30, 0x583f9a60, 0x583f9a90 } -Next token is token 'p' (0x583f9ac0 'p'Exception caught: cleaning lookahead and stack -0x583f9ac0->Object::~Object { 0x583f9a00, 0x583f9a30, 0x583f9a60, 0x583f9a90, 0x583f9ac0 } -0x583f9a90->Object::~Object { 0x583f9a00, 0x583f9a30, 0x583f9a60, 0x583f9a90 } -0x583f9a60->Object::~Object { 0x583f9a00, 0x583f9a30, 0x583f9a60 } -0x583f9a30->Object::~Object { 0x583f9a00, 0x583f9a30 } -0x583f9a00->Object::~Object { 0x583f9a00 } -exception caught: printer -end { } -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting 
parse -Entering state 0 -Stack now 0 -Reading a token -0x583f9a00->Object::Object { } -Next token is token 'a' (0x583f9a00 'a') -Shifting token 'a' (0x583f9a00 'a') -Entering state 2 -Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x583f9a00 'a') --> $$ = nterm item (0x583f9a00 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x583f9a30->Object::Object { 0x583f9a00 } -Next token is token 'a' (0x583f9a30 'a') -Shifting token 'a' (0x583f9a30 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x583f9a30 'a') --> $$ = nterm item (0x583f9a30 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x583f9a60->Object::Object { 0x583f9a00, 0x583f9a30 } -Next token is token 'a' (0x583f9a60 'a') -Shifting token 'a' (0x583f9a60 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x583f9a60 'a') --> $$ = nterm item (0x583f9a60 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x583f9a90->Object::Object { 0x583f9a00, 0x583f9a30, 0x583f9a60 } -Next token is token 'a' (0x583f9a90 'a') -Shifting token 'a' (0x583f9a90 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x583f9a90 'a') --> $$ = nterm item (0x583f9a90 'a') -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x583f9ac0->Object::Object { 0x583f9a00, 0x583f9a30, 0x583f9a60, 0x583f9a90 } -Next token is token 'p' (0x583f9ac0 'p'Exception caught: cleaning lookahead and stack -0x583f9ac0->Object::~Object { 0x583f9a00, 0x583f9a30, 0x583f9a60, 0x583f9a90, 0x583f9ac0 } -0x583f9a90->Object::~Object { 0x583f9a00, 0x583f9a30, 0x583f9a60, 0x583f9a90 } -0x583f9a60->Object::~Object { 0x583f9a00, 0x583f9a30, 0x583f9a60 } -0x583f9a30->Object::~Object { 0x583f9a00, 0x583f9a30 } -0x583f9a00->Object::~Object { 0x583f9a00 } -exception caught: printer -end { } -./c++.at:1360: grep '^exception caught: printer$' stderr -stdout: -exception caught: printer -./c++.at:1360: $PREPARSER ./input aaaae -stderr: -exception caught: syntax error -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaT -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaR -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -======== Testing with C++ standard flags: '' -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1555: $PREPARSER ./test -stderr: -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./c++.at:1363: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input i -stderr: 
-exception caught: initial-action -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./c++.at:1555: ./check -./c++.at:1363: $PREPARSER ./input aaaap --std=c++98 not supported -======== Testing with C++ standard flags: '' -stderr: -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input --debug aaaap -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xfff29fcf->Object::Object { } -0xfff2a078->Object::Object { 0xfff29fcf } -0xfff29fcf->Object::~Object { 0xfff29fcf, 0xfff2a078 } -Next token is token 'a' (0xfff2a078 'a') -0xfff29fe0->Object::Object { 0xfff2a078 } -0xfff29f6b->Object::Object { 0xfff29fe0, 0xfff2a078 } -0xfff29f6b->Object::~Object { 0xfff29f6b, 0xfff29fe0, 0xfff2a078 } -0xfff2a078->Object::~Object { 0xfff29fe0, 0xfff2a078 } -Shifting token 'a' (0xfff29fe0 'a') -0x5667d3c4->Object::Object { 0xfff29fe0 } -0xfff29f6f->Object::Object { 0x5667d3c4, 0xfff29fe0 } -0xfff29f6f->Object::~Object { 0x5667d3c4, 0xfff29f6f, 0xfff29fe0 } -0xfff29fe0->Object::~Object { 0x5667d3c4, 0xfff29fe0 } -Entering state 1 -Stack now 0 1 -0xfff2a088->Object::Object { 0x5667d3c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5667d3c4 'a') --> $$ = nterm item (0xfff2a088 'a') -0x5667d3c4->Object::~Object { 0x5667d3c4, 0xfff2a088 } -0x5667d3c4->Object::Object { 0xfff2a088 } -0xfff29fcf->Object::Object { 0x5667d3c4, 0xfff2a088 } -0xfff29fcf->Object::~Object { 0x5667d3c4, 0xfff29fcf, 0xfff2a088 } -0xfff2a088->Object::~Object { 0x5667d3c4, 0xfff2a088 } -Entering state 10 -Stack now 0 10 -Reading a token -0xfff29fcf->Object::Object { 0x5667d3c4 } -0xfff2a078->Object::Object { 0x5667d3c4, 0xfff29fcf } -0xfff29fcf->Object::~Object { 0x5667d3c4, 0xfff29fcf, 0xfff2a078 } -Next token is token 'a' (0xfff2a078 'a') -0xfff29fe0->Object::Object { 0x5667d3c4, 0xfff2a078 } -0xfff29f6b->Object::Object { 0x5667d3c4, 0xfff29fe0, 0xfff2a078 } -0xfff29f6b->Object::~Object { 0x5667d3c4, 0xfff29f6b, 0xfff29fe0, 0xfff2a078 } -0xfff2a078->Object::~Object { 0x5667d3c4, 0xfff29fe0, 0xfff2a078 } -Shifting token 'a' (0xfff29fe0 'a') -0x5667d3d4->Object::Object { 0x5667d3c4, 0xfff29fe0 } -0xfff29f6f->Object::Object { 0x5667d3c4, 0x5667d3d4, 0xfff29fe0 } -0xfff29f6f->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0xfff29f6f, 0xfff29fe0 } -0xfff29fe0->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0xfff29fe0 } -Entering state 1 -Stack now 0 10 1 -0xfff2a088->Object::Object { 0x5667d3c4, 0x5667d3d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5667d3d4 'a') --> $$ = nterm item (0xfff2a088 'a') -0x5667d3d4->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0xfff2a088 } -0x5667d3d4->Object::Object { 0x5667d3c4, 0xfff2a088 } -0xfff29fcf->Object::Object { 0x5667d3c4, 0x5667d3d4, 0xfff2a088 } -0xfff29fcf->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0xfff29fcf, 0xfff2a088 } -0xfff2a088->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0xfff2a088 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0xfff29fcf->Object::Object { 0x5667d3c4, 0x5667d3d4 } -0xfff2a078->Object::Object { 0x5667d3c4, 0x5667d3d4, 0xfff29fcf } -0xfff29fcf->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0xfff29fcf, 0xfff2a078 } -Next token is token 'a' (0xfff2a078 'a') -0xfff29fe0->Object::Object { 0x5667d3c4, 0x5667d3d4, 0xfff2a078 } -0xfff29f6b->Object::Object { 0x5667d3c4, 0x5667d3d4, 0xfff29fe0, 0xfff2a078 } -0xfff29f6b->Object::~Object { 0x5667d3c4, 0x5667d3d4, 
0xfff29f6b, 0xfff29fe0, 0xfff2a078 } -0xfff2a078->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0xfff29fe0, 0xfff2a078 } -Shifting token 'a' (0xfff29fe0 'a') -0x5667d3e4->Object::Object { 0x5667d3c4, 0x5667d3d4, 0xfff29fe0 } -0xfff29f6f->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff29fe0 } -0xfff29f6f->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff29f6f, 0xfff29fe0 } -0xfff29fe0->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff29fe0 } -Entering state 1 -Stack now 0 10 10 1 -0xfff2a088->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5667d3e4 'a') --> $$ = nterm item (0xfff2a088 'a') -0x5667d3e4->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff2a088 } -0x5667d3e4->Object::Object { 0x5667d3c4, 0x5667d3d4, 0xfff2a088 } -0xfff29fcf->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff2a088 } -0xfff29fcf->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff29fcf, 0xfff2a088 } -0xfff2a088->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff2a088 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0xfff29fcf->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4 } -0xfff2a078->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff29fcf } -0xfff29fcf->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff29fcf, 0xfff2a078 } -Next token is token 'a' (0xfff2a078 'a') -0xfff29fe0->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff2a078 } -0xfff29f6b->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff29fe0, 0xfff2a078 } -0xfff29f6b->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff29f6b, 0xfff29fe0, 0xfff2a078 } -0xfff2a078->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff29fe0, 0xfff2a078 } -Shifting token 'a' (0xfff29fe0 'a') -0x5667d3f4->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff29fe0 } -0xfff29f6f->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff29fe0 } -0xfff29f6f->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff29f6f, 0xfff29fe0 } -0xfff29fe0->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff29fe0 } -Entering state 1 -Stack now 0 10 10 10 1 -0xfff2a088->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5667d3f4 'a') --> $$ = nterm item (0xfff2a088 'a') -0x5667d3f4->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff2a088 } -0x5667d3f4->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff2a088 } -0xfff29fcf->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff2a088 } -0xfff29fcf->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff29fcf, 0xfff2a088 } -0xfff2a088->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff2a088 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0xfff29fcf->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4 } -0xfff2a078->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff29fcf } -0xfff29fcf->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff29fcf, 0xfff2a078 } -Next token is token 'p' (0xfff2a078 'p'Exception caught: cleaning lookahead and stack -0x5667d3f4->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff2a078 } -0x5667d3e4->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff2a078 } -0x5667d3d4->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0xfff2a078 } 
-0x5667d3c4->Object::~Object { 0x5667d3c4, 0xfff2a078 } -0xfff2a078->Object::~Object { 0xfff2a078 } -exception caught: printer -end { } -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xfff29fcf->Object::Object { } -0xfff2a078->Object::Object { 0xfff29fcf } -0xfff29fcf->Object::~Object { 0xfff29fcf, 0xfff2a078 } -Next token is token 'a' (0xfff2a078 'a') -0xfff29fe0->Object::Object { 0xfff2a078 } -0xfff29f6b->Object::Object { 0xfff29fe0, 0xfff2a078 } -0xfff29f6b->Object::~Object { 0xfff29f6b, 0xfff29fe0, 0xfff2a078 } -0xfff2a078->Object::~Object { 0xfff29fe0, 0xfff2a078 } -Shifting token 'a' (0xfff29fe0 'a') -0x5667d3c4->Object::Object { 0xfff29fe0 } -0xfff29f6f->Object::Object { 0x5667d3c4, 0xfff29fe0 } -0xfff29f6f->Object::~Object { 0x5667d3c4, 0xfff29f6f, 0xfff29fe0 } -0xfff29fe0->Object::~Object { 0x5667d3c4, 0xfff29fe0 } -Entering state 1 -Stack now 0 1 -0xfff2a088->Object::Object { 0x5667d3c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5667d3c4 'a') --> $$ = nterm item (0xfff2a088 'a') -0x5667d3c4->Object::~Object { 0x5667d3c4, 0xfff2a088 } -0x5667d3c4->Object::Object { 0xfff2a088 } -0xfff29fcf->Object::Object { 0x5667d3c4, 0xfff2a088 } -0xfff29fcf->Object::~Object { 0x5667d3c4, 0xfff29fcf, 0xfff2a088 } -0xfff2a088->Object::~Object { 0x5667d3c4, 0xfff2a088 } -Entering state 10 -Stack now 0 10 -Reading a token -0xfff29fcf->Object::Object { 0x5667d3c4 } -0xfff2a078->Object::Object { 0x5667d3c4, 0xfff29fcf } -0xfff29fcf->Object::~Object { 0x5667d3c4, 0xfff29fcf, 0xfff2a078 } -Next token is token 'a' (0xfff2a078 'a') -0xfff29fe0->Object::Object { 0x5667d3c4, 0xfff2a078 } -0xfff29f6b->Object::Object { 0x5667d3c4, 0xfff29fe0, 0xfff2a078 } -0xfff29f6b->Object::~Object { 0x5667d3c4, 0xfff29f6b, 0xfff29fe0, 0xfff2a078 } -0xfff2a078->Object::~Object { 0x5667d3c4, 0xfff29fe0, 0xfff2a078 } -Shifting token 'a' (0xfff29fe0 'a') -0x5667d3d4->Object::Object { 0x5667d3c4, 0xfff29fe0 } -0xfff29f6f->Object::Object { 0x5667d3c4, 0x5667d3d4, 0xfff29fe0 } -0xfff29f6f->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0xfff29f6f, 0xfff29fe0 } -0xfff29fe0->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0xfff29fe0 } -Entering state 1 -Stack now 0 10 1 -0xfff2a088->Object::Object { 0x5667d3c4, 0x5667d3d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5667d3d4 'a') --> $$ = nterm item (0xfff2a088 'a') -0x5667d3d4->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0xfff2a088 } -0x5667d3d4->Object::Object { 0x5667d3c4, 0xfff2a088 } -0xfff29fcf->Object::Object { 0x5667d3c4, 0x5667d3d4, 0xfff2a088 } -0xfff29fcf->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0xfff29fcf, 0xfff2a088 } -0xfff2a088->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0xfff2a088 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0xfff29fcf->Object::Object { 0x5667d3c4, 0x5667d3d4 } -0xfff2a078->Object::Object { 0x5667d3c4, 0x5667d3d4, 0xfff29fcf } -0xfff29fcf->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0xfff29fcf, 0xfff2a078 } -Next token is token 'a' (0xfff2a078 'a') -0xfff29fe0->Object::Object { 0x5667d3c4, 0x5667d3d4, 0xfff2a078 } -0xfff29f6b->Object::Object { 0x5667d3c4, 0x5667d3d4, 0xfff29fe0, 0xfff2a078 } -0xfff29f6b->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0xfff29f6b, 0xfff29fe0, 0xfff2a078 } -0xfff2a078->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0xfff29fe0, 0xfff2a078 } -Shifting token 'a' (0xfff29fe0 'a') -0x5667d3e4->Object::Object { 0x5667d3c4, 0x5667d3d4, 0xfff29fe0 } -0xfff29f6f->Object::Object { 0x5667d3c4, 
0x5667d3d4, 0x5667d3e4, 0xfff29fe0 } -0xfff29f6f->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff29f6f, 0xfff29fe0 } -0xfff29fe0->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff29fe0 } -Entering state 1 -Stack now 0 10 10 1 -0xfff2a088->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5667d3e4 'a') --> $$ = nterm item (0xfff2a088 'a') -0x5667d3e4->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff2a088 } -0x5667d3e4->Object::Object { 0x5667d3c4, 0x5667d3d4, 0xfff2a088 } -0xfff29fcf->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff2a088 } -0xfff29fcf->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff29fcf, 0xfff2a088 } -0xfff2a088->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff2a088 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0xfff29fcf->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4 } -0xfff2a078->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff29fcf } -0xfff29fcf->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff29fcf, 0xfff2a078 } -Next token is token 'a' (0xfff2a078 'a') -0xfff29fe0->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff2a078 } -0xfff29f6b->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff29fe0, 0xfff2a078 } -0xfff29f6b->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff29f6b, 0xfff29fe0, 0xfff2a078 } -0xfff2a078->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff29fe0, 0xfff2a078 } -Shifting token 'a' (0xfff29fe0 'a') -0x5667d3f4->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff29fe0 } -0xfff29f6f->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff29fe0 } -0xfff29f6f->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff29f6f, 0xfff29fe0 } -0xfff29fe0->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff29fe0 } -Entering state 1 -Stack now 0 10 10 10 1 -0xfff2a088->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5667d3f4 'a') --> $$ = nterm item (0xfff2a088 'a') -0x5667d3f4->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff2a088 } -0x5667d3f4->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff2a088 } -0xfff29fcf->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff2a088 } -0xfff29fcf->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff29fcf, 0xfff2a088 } -0xfff2a088->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff2a088 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0xfff29fcf->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4 } -0xfff2a078->Object::Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff29fcf } -0xfff29fcf->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff29fcf, 0xfff2a078 } -Next token is token 'p' (0xfff2a078 'p'Exception caught: cleaning lookahead and stack -0x5667d3f4->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0x5667d3f4, 0xfff2a078 } -0x5667d3e4->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0x5667d3e4, 0xfff2a078 } -0x5667d3d4->Object::~Object { 0x5667d3c4, 0x5667d3d4, 0xfff2a078 } -0x5667d3c4->Object::~Object { 0x5667d3c4, 0xfff2a078 } -0xfff2a078->Object::~Object { 0xfff2a078 } -exception caught: printer -end { } -./c++.at:1363: grep '^exception caught: printer$' stderr -stdout: -exception caught: printer -./c++.at:1363: $PREPARSER ./input aaaae 
-stderr: -exception caught: syntax error -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaT -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaR -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1555: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./c++.at:1555: ./check --std=c++11 not supported -======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./c++.at:1555: ./check -./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +exception caught: printer +./c++.at:1362: $PREPARSER ./input aaaae stderr: -stdout: -./c++.at:858: $PREPARSER ./input +exception caught: syntax error +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaE stderr: -stdout: stderr: +stdout: ./c++.at:1361: $PREPARSER ./input aaaas -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -======== Testing with C++ standard flags: '' +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr exception caught: reduction -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1362: $PREPARSER ./input aaaaT ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input aaaal stderr: +stderr: exception caught: yylex ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input i +./c++.at:1362: $PREPARSER ./input aaaaR stderr: exception caught: initial-action ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:1361: $PREPARSER ./input aaaap stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input --debug aaaap stderr: +stderr: +stdout: +./c++.at:1363: $PREPARSER ./input aaaas +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x576c1a00->Object::Object { } -Next token is token 'a' (0x576c1a00 'a') -Shifting token 'a' (0x576c1a00 'a') +0x57935a00->Object::Object { } +Next token is token 'a' (0x57935a00 'a') +Shifting token 'a' (0x57935a00 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x576c1a00 'a') --> $$ = nterm item (0x576c1a00 'a') + $1 = token 'a' 
(0x57935a00 'a') +-> $$ = nterm item (0x57935a00 'a') Entering state 10 Stack now 0 10 Reading a token -0x576c1a30->Object::Object { 0x576c1a00 } -Next token is token 'a' (0x576c1a30 'a') -Shifting token 'a' (0x576c1a30 'a') +0x57935a30->Object::Object { 0x57935a00 } +Next token is token 'a' (0x57935a30 'a') +Shifting token 'a' (0x57935a30 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x576c1a30 'a') --> $$ = nterm item (0x576c1a30 'a') + $1 = token 'a' (0x57935a30 'a') +-> $$ = nterm item (0x57935a30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x576c1a60->Object::Object { 0x576c1a00, 0x576c1a30 } -Next token is token 'a' (0x576c1a60 'a') -Shifting token 'a' (0x576c1a60 'a') +0x57935a60->Object::Object { 0x57935a00, 0x57935a30 } +Next token is token 'a' (0x57935a60 'a') +Shifting token 'a' (0x57935a60 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x576c1a60 'a') --> $$ = nterm item (0x576c1a60 'a') + $1 = token 'a' (0x57935a60 'a') +-> $$ = nterm item (0x57935a60 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x576c1a90->Object::Object { 0x576c1a00, 0x576c1a30, 0x576c1a60 } -Next token is token 'a' (0x576c1a90 'a') -Shifting token 'a' (0x576c1a90 'a') +0x57935a90->Object::Object { 0x57935a00, 0x57935a30, 0x57935a60 } +Next token is token 'a' (0x57935a90 'a') +Shifting token 'a' (0x57935a90 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x576c1a90 'a') --> $$ = nterm item (0x576c1a90 'a') + $1 = token 'a' (0x57935a90 'a') +-> $$ = nterm item (0x57935a90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x576c1ac0->Object::Object { 0x576c1a00, 0x576c1a30, 0x576c1a60, 0x576c1a90 } -Next token is token 'p' (0x576c1ac0 'p'Exception caught: cleaning lookahead and stack -0x576c1ac0->Object::~Object { 0x576c1a00, 0x576c1a30, 0x576c1a60, 0x576c1a90, 0x576c1ac0 } -0x576c1a90->Object::~Object { 0x576c1a00, 0x576c1a30, 0x576c1a60, 0x576c1a90 } -0x576c1a60->Object::~Object { 0x576c1a00, 0x576c1a30, 0x576c1a60 } -0x576c1a30->Object::~Object { 0x576c1a00, 0x576c1a30 } -0x576c1a00->Object::~Object { 0x576c1a00 } +0x57935ac0->Object::Object { 0x57935a00, 0x57935a30, 0x57935a60, 0x57935a90 } +Next token is token 'p' (0x57935ac0 'p'Exception caught: cleaning lookahead and stack +0x57935ac0->Object::~Object { 0x57935a00, 0x57935a30, 0x57935a60, 0x57935a90, 0x57935ac0 } +0x57935a90->Object::~Object { 0x57935a00, 0x57935a30, 0x57935a60, 0x57935a90 } +0x57935a60->Object::~Object { 0x57935a00, 0x57935a30, 0x57935a60 } +0x57935a30->Object::~Object { 0x57935a00, 0x57935a30 } +0x57935a00->Object::~Object { 0x57935a00 } exception caught: printer end { } +exception caught: reduction +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x576c1a00->Object::Object { } -Next token is token 'a' (0x576c1a00 'a') -Shifting token 'a' (0x576c1a00 'a') +0x57935a00->Object::Object { } +Next token is token 'a' (0x57935a00 'a') +Shifting token 'a' (0x57935a00 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x576c1a00 'a') --> $$ = nterm item (0x576c1a00 'a') + $1 = token 'a' (0x57935a00 'a') +-> $$ = nterm item (0x57935a00 'a') Entering state 10 Stack now 0 10 Reading a token -0x576c1a30->Object::Object { 0x576c1a00 } 
-Next token is token 'a' (0x576c1a30 'a') -Shifting token 'a' (0x576c1a30 'a') +0x57935a30->Object::Object { 0x57935a00 } +Next token is token 'a' (0x57935a30 'a') +Shifting token 'a' (0x57935a30 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x576c1a30 'a') --> $$ = nterm item (0x576c1a30 'a') + $1 = token 'a' (0x57935a30 'a') +-> $$ = nterm item (0x57935a30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x576c1a60->Object::Object { 0x576c1a00, 0x576c1a30 } -Next token is token 'a' (0x576c1a60 'a') -Shifting token 'a' (0x576c1a60 'a') +0x57935a60->Object::Object { 0x57935a00, 0x57935a30 } +Next token is token 'a' (0x57935a60 'a') +Shifting token 'a' (0x57935a60 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x576c1a60 'a') --> $$ = nterm item (0x576c1a60 'a') + $1 = token 'a' (0x57935a60 'a') +-> $$ = nterm item (0x57935a60 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x576c1a90->Object::Object { 0x576c1a00, 0x576c1a30, 0x576c1a60 } -Next token is token 'a' (0x576c1a90 'a') -Shifting token 'a' (0x576c1a90 'a') +0x57935a90->Object::Object { 0x57935a00, 0x57935a30, 0x57935a60 } +Next token is token 'a' (0x57935a90 'a') +Shifting token 'a' (0x57935a90 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x576c1a90 'a') --> $$ = nterm item (0x576c1a90 'a') + $1 = token 'a' (0x57935a90 'a') +-> $$ = nterm item (0x57935a90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x576c1ac0->Object::Object { 0x576c1a00, 0x576c1a30, 0x576c1a60, 0x576c1a90 } -Next token is token 'p' (0x576c1ac0 'p'Exception caught: cleaning lookahead and stack -0x576c1ac0->Object::~Object { 0x576c1a00, 0x576c1a30, 0x576c1a60, 0x576c1a90, 0x576c1ac0 } -0x576c1a90->Object::~Object { 0x576c1a00, 0x576c1a30, 0x576c1a60, 0x576c1a90 } -0x576c1a60->Object::~Object { 0x576c1a00, 0x576c1a30, 0x576c1a60 } -0x576c1a30->Object::~Object { 0x576c1a00, 0x576c1a30 } -0x576c1a00->Object::~Object { 0x576c1a00 } +0x57935ac0->Object::Object { 0x57935a00, 0x57935a30, 0x57935a60, 0x57935a90 } +Next token is token 'p' (0x57935ac0 'p'Exception caught: cleaning lookahead and stack +0x57935ac0->Object::~Object { 0x57935a00, 0x57935a30, 0x57935a60, 0x57935a90, 0x57935ac0 } +0x57935a90->Object::~Object { 0x57935a00, 0x57935a30, 0x57935a60, 0x57935a90 } +0x57935a60->Object::~Object { 0x57935a00, 0x57935a30, 0x57935a60 } +0x57935a30->Object::~Object { 0x57935a00, 0x57935a30 } +0x57935a00->Object::~Object { 0x57935a00 } exception caught: printer end { } ./c++.at:1361: grep '^exception caught: printer$' stderr stdout: exception caught: printer ./c++.at:1361: $PREPARSER ./input aaaae +./c++.at:1363: $PREPARSER ./input aaaal stderr: -exception caught: syntax error -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaT +exception caught: yylex +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input i stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaR stderr: +exception caught: syntax error +exception caught: initial-action +./c++.at:1363: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1066: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -======== Testing with C++ standard flags: '' -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1066: ./check -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -stdout: -./c++.at:1362: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input i +./c++.at:1363: $PREPARSER ./input aaaap stderr: -exception caught: initial-action -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaap +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input --debug aaaap +./c++.at:1361: $PREPARSER ./input aaaaE stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input --debug aaaap stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xffc7096f->Object::Object { } -0xffc70a18->Object::Object { 0xffc7096f } -0xffc7096f->Object::~Object { 0xffc7096f, 0xffc70a18 } -Next token is token 'a' (0xffc70a18 'a') -0xffc70968->Object::Object { 0xffc70a18 } -0xffc70a18->Object::~Object { 0xffc70968, 0xffc70a18 } -Shifting token 'a' (0xffc70968 'a') -0x57f443c4->Object::Object { 0xffc70968 } -0xffc70968->Object::~Object { 0x57f443c4, 0xffc70968 } -Entering state 2 -Stack now 0 2 -0xffc70a28->Object::Object { 0x57f443c4 } +0xff98d16f->Object::Object { } +0xff98d218->Object::Object { 0xff98d16f } +0xff98d16f->Object::~Object { 0xff98d16f, 0xff98d218 } +Next token is token 'a' (0xff98d218 'a') +0xff98d180->Object::Object { 0xff98d218 } +0xff98d10b->Object::Object { 0xff98d180, 0xff98d218 } +0xff98d10b->Object::~Object { 0xff98d10b, 0xff98d180, 0xff98d218 } +0xff98d218->Object::~Object { 0xff98d180, 0xff98d218 } +Shifting token 'a' (0xff98d180 'a') +0x585083c4->Object::Object { 0xff98d180 } +0xff98d10f->Object::Object { 0x585083c4, 0xff98d180 } +0xff98d10f->Object::~Object { 0x585083c4, 0xff98d10f, 0xff98d180 } +0xff98d180->Object::~Object { 0x585083c4, 0xff98d180 } +Entering state 1 +Stack now 0 1 +0xff98d228->Object::Object { 0x585083c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57f443c4 'a') --> $$ = nterm item (0xffc70a28 'a') -0x57f443c4->Object::~Object { 0x57f443c4, 0xffc70a28 } 
-0x57f443c4->Object::Object { 0xffc70a28 } -0xffc70a28->Object::~Object { 0x57f443c4, 0xffc70a28 } -Entering state 11 -Stack now 0 11 + $1 = token 'a' (0x585083c4 'a') +-> $$ = nterm item (0xff98d228 'a') +0x585083c4->Object::~Object { 0x585083c4, 0xff98d228 } +0x585083c4->Object::Object { 0xff98d228 } +0xff98d16f->Object::Object { 0x585083c4, 0xff98d228 } +0xff98d16f->Object::~Object { 0x585083c4, 0xff98d16f, 0xff98d228 } +0xff98d228->Object::~Object { 0x585083c4, 0xff98d228 } +Entering state 10 +Stack now 0 10 Reading a token -0xffc7096f->Object::Object { 0x57f443c4 } -0xffc70a18->Object::Object { 0x57f443c4, 0xffc7096f } -0xffc7096f->Object::~Object { 0x57f443c4, 0xffc7096f, 0xffc70a18 } -Next token is token 'a' (0xffc70a18 'a') -0xffc70968->Object::Object { 0x57f443c4, 0xffc70a18 } -0xffc70a18->Object::~Object { 0x57f443c4, 0xffc70968, 0xffc70a18 } -Shifting token 'a' (0xffc70968 'a') -0x57f443d4->Object::Object { 0x57f443c4, 0xffc70968 } -0xffc70968->Object::~Object { 0x57f443c4, 0x57f443d4, 0xffc70968 } -Entering state 2 -Stack now 0 11 2 -0xffc70a28->Object::Object { 0x57f443c4, 0x57f443d4 } +0xff98d16f->Object::Object { 0x585083c4 } +0xff98d218->Object::Object { 0x585083c4, 0xff98d16f } +0xff98d16f->Object::~Object { 0x585083c4, 0xff98d16f, 0xff98d218 } +Next token is token 'a' (0xff98d218 'a') +0xff98d180->Object::Object { 0x585083c4, 0xff98d218 } +0xff98d10b->Object::Object { 0x585083c4, 0xff98d180, 0xff98d218 } +0xff98d10b->Object::~Object { 0x585083c4, 0xff98d10b, 0xff98d180, 0xff98d218 } +0xff98d218->Object::~Object { 0x585083c4, 0xff98d180, 0xff98d218 } +Shifting token 'a' (0xff98d180 'a') +0x585083d4->Object::Object { 0x585083c4, 0xff98d180 } +0xff98d10f->Object::Object { 0x585083c4, 0x585083d4, 0xff98d180 } +0xff98d10f->Object::~Object { 0x585083c4, 0x585083d4, 0xff98d10f, 0xff98d180 } +0xff98d180->Object::~Object { 0x585083c4, 0x585083d4, 0xff98d180 } +Entering state 1 +Stack now 0 10 1 +0xff98d228->Object::Object { 0x585083c4, 0x585083d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57f443d4 'a') --> $$ = nterm item (0xffc70a28 'a') -0x57f443d4->Object::~Object { 0x57f443c4, 0x57f443d4, 0xffc70a28 } -0x57f443d4->Object::Object { 0x57f443c4, 0xffc70a28 } -0xffc70a28->Object::~Object { 0x57f443c4, 0x57f443d4, 0xffc70a28 } -Entering state 11 -Stack now 0 11 11 + $1 = token 'a' (0x585083d4 'a') +-> $$ = nterm item (0xff98d228 'a') +0x585083d4->Object::~Object { 0x585083c4, 0x585083d4, 0xff98d228 } +0x585083d4->Object::Object { 0x585083c4, 0xff98d228 } +0xff98d16f->Object::Object { 0x585083c4, 0x585083d4, 0xff98d228 } +0xff98d16f->Object::~Object { 0x585083c4, 0x585083d4, 0xff98d16f, 0xff98d228 } +0xff98d228->Object::~Object { 0x585083c4, 0x585083d4, 0xff98d228 } +Entering state 10 +Stack now 0 10 10 Reading a token -0xffc7096f->Object::Object { 0x57f443c4, 0x57f443d4 } -0xffc70a18->Object::Object { 0x57f443c4, 0x57f443d4, 0xffc7096f } -0xffc7096f->Object::~Object { 0x57f443c4, 0x57f443d4, 0xffc7096f, 0xffc70a18 } -Next token is token 'a' (0xffc70a18 'a') -0xffc70968->Object::Object { 0x57f443c4, 0x57f443d4, 0xffc70a18 } -0xffc70a18->Object::~Object { 0x57f443c4, 0x57f443d4, 0xffc70968, 0xffc70a18 } -Shifting token 'a' (0xffc70968 'a') -0x57f443e4->Object::Object { 0x57f443c4, 0x57f443d4, 0xffc70968 } -0xffc70968->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc70968 } -Entering state 2 -Stack now 0 11 11 2 -0xffc70a28->Object::Object { 0x57f443c4, 0x57f443d4, 0x57f443e4 } +0xff98d16f->Object::Object { 0x585083c4, 0x585083d4 } 
+0xff98d218->Object::Object { 0x585083c4, 0x585083d4, 0xff98d16f } +0xff98d16f->Object::~Object { 0x585083c4, 0x585083d4, 0xff98d16f, 0xff98d218 } +Next token is token 'a' (0xff98d218 'a') +0xff98d180->Object::Object { 0x585083c4, 0x585083d4, 0xff98d218 } +0xff98d10b->Object::Object { 0x585083c4, 0x585083d4, 0xff98d180, 0xff98d218 } +0xff98d10b->Object::~Object { 0x585083c4, 0x585083d4, 0xff98d10b, 0xff98d180, 0xff98d218 } +0xff98d218->Object::~Object { 0x585083c4, 0x585083d4, 0xff98d180, 0xff98d218 } +Shifting token 'a' (0xff98d180 'a') +0x585083e4->Object::Object { 0x585083c4, 0x585083d4, 0xff98d180 } +0xff98d10f->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d180 } +0xff98d10f->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d10f, 0xff98d180 } +0xff98d180->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d180 } +Entering state 1 +Stack now 0 10 10 1 +0xff98d228->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57f443e4 'a') --> $$ = nterm item (0xffc70a28 'a') -0x57f443e4->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc70a28 } -0x57f443e4->Object::Object { 0x57f443c4, 0x57f443d4, 0xffc70a28 } -0xffc70a28->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc70a28 } -Entering state 11 -Stack now 0 11 11 11 + $1 = token 'a' (0x585083e4 'a') +-> $$ = nterm item (0xff98d228 'a') +0x585083e4->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d228 } +0x585083e4->Object::Object { 0x585083c4, 0x585083d4, 0xff98d228 } +0xff98d16f->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d228 } +0xff98d16f->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d16f, 0xff98d228 } +0xff98d228->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d228 } +Entering state 10 +Stack now 0 10 10 10 Reading a token -0xffc7096f->Object::Object { 0x57f443c4, 0x57f443d4, 0x57f443e4 } -0xffc70a18->Object::Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc7096f } -0xffc7096f->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc7096f, 0xffc70a18 } -Next token is token 'a' (0xffc70a18 'a') -0xffc70968->Object::Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc70a18 } -0xffc70a18->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc70968, 0xffc70a18 } -Shifting token 'a' (0xffc70968 'a') -0x57f443f4->Object::Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc70968 } -0xffc70968->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0x57f443f4, 0xffc70968 } -Entering state 2 -Stack now 0 11 11 11 2 -0xffc70a28->Object::Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0x57f443f4 } +0xff98d16f->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4 } +0xff98d218->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d16f } +0xff98d16f->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d16f, 0xff98d218 } +Next token is token 'a' (0xff98d218 'a') +0xff98d180->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d218 } +0xff98d10b->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d180, 0xff98d218 } +0xff98d10b->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d10b, 0xff98d180, 0xff98d218 } +0xff98d218->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d180, 0xff98d218 } +Shifting token 'a' (0xff98d180 'a') +0x585083f4->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d180 } +0xff98d10f->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4, 0xff98d180 } +0xff98d10f->Object::~Object { 0x585083c4, 
0x585083d4, 0x585083e4, 0x585083f4, 0xff98d10f, 0xff98d180 } +0xff98d180->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4, 0xff98d180 } +Entering state 1 +Stack now 0 10 10 10 1 +0xff98d228->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57f443f4 'a') --> $$ = nterm item (0xffc70a28 'a') -0x57f443f4->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0x57f443f4, 0xffc70a28 } -0x57f443f4->Object::Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc70a28 } -0xffc70a28->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0x57f443f4, 0xffc70a28 } -Entering state 11 -Stack now 0 11 11 11 11 + $1 = token 'a' (0x585083f4 'a') +-> $$ = nterm item (0xff98d228 'a') +0x585083f4->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4, 0xff98d228 } +0x585083f4->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d228 } +0xff98d16f->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4, 0xff98d228 } +0xff98d16f->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4, 0xff98d16f, 0xff98d228 } +0xff98d228->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4, 0xff98d228 } +Entering state 10 +Stack now 0 10 10 10 10 Reading a token -0xffc7096f->Object::Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0x57f443f4 } -0xffc70a18->Object::Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0x57f443f4, 0xffc7096f } -0xffc7096f->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0x57f443f4, 0xffc7096f, 0xffc70a18 } -Next token is token 'p' (0xffc70a18 'p'Exception caught: cleaning lookahead and stack -0x57f443f4->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0x57f443f4, 0xffc70a18 } -0x57f443e4->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc70a18 } -0x57f443d4->Object::~Object { 0x57f443c4, 0x57f443d4, 0xffc70a18 } -0x57f443c4->Object::~Object { 0x57f443c4, 0xffc70a18 } -0xffc70a18->Object::~Object { 0xffc70a18 } +0xff98d16f->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4 } +0xff98d218->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4, 0xff98d16f } +0xff98d16f->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4, 0xff98d16f, 0xff98d218 } +Next token is token 'p' (0xff98d218 'p'Exception caught: cleaning lookahead and stack +0x585083f4->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4, 0xff98d218 } +0x585083e4->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d218 } +0x585083d4->Object::~Object { 0x585083c4, 0x585083d4, 0xff98d218 } +0x585083c4->Object::~Object { 0x585083c4, 0xff98d218 } +0xff98d218->Object::~Object { 0xff98d218 } exception caught: printer end { } -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xffc7096f->Object::Object { } -0xffc70a18->Object::Object { 0xffc7096f } -0xffc7096f->Object::~Object { 0xffc7096f, 0xffc70a18 } -Next token is token 'a' (0xffc70a18 'a') -0xffc70968->Object::Object { 0xffc70a18 } -0xffc70a18->Object::~Object { 0xffc70968, 0xffc70a18 } -Shifting token 'a' (0xffc70968 'a') -0x57f443c4->Object::Object { 0xffc70968 } -0xffc70968->Object::~Object { 0x57f443c4, 0xffc70968 } -Entering state 2 -Stack now 0 2 
-0xffc70a28->Object::Object { 0x57f443c4 } +0xff98d16f->Object::Object { } +0xff98d218->Object::Object { 0xff98d16f } +0xff98d16f->Object::~Object { 0xff98d16f, 0xff98d218 } +Next token is token 'a' (0xff98d218 'a') +0xff98d180->Object::Object { 0xff98d218 } +0xff98d10b->Object::Object { 0xff98d180, 0xff98d218 } +0xff98d10b->Object::~Object { 0xff98d10b, 0xff98d180, 0xff98d218 } +0xff98d218->Object::~Object { 0xff98d180, 0xff98d218 } +Shifting token 'a' (0xff98d180 'a') +0x585083c4->Object::Object { 0xff98d180 } +0xff98d10f->Object::Object { 0x585083c4, 0xff98d180 } +0xff98d10f->Object::~Object { 0x585083c4, 0xff98d10f, 0xff98d180 } +0xff98d180->Object::~Object { 0x585083c4, 0xff98d180 } +Entering state 1 +Stack now 0 1 +0xff98d228->Object::Object { 0x585083c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57f443c4 'a') --> $$ = nterm item (0xffc70a28 'a') -0x57f443c4->Object::~Object { 0x57f443c4, 0xffc70a28 } -0x57f443c4->Object::Object { 0xffc70a28 } -0xffc70a28->Object::~Object { 0x57f443c4, 0xffc70a28 } -Entering state 11 -Stack now 0 11 + $1 = token 'a' (0x585083c4 'a') +-> $$ = nterm item (0xff98d228 'a') +0x585083c4->Object::~Object { 0x585083c4, 0xff98d228 } +0x585083c4->Object::Object { 0xff98d228 } +0xff98d16f->Object::Object { 0x585083c4, 0xff98d228 } +0xff98d16f->Object::~Object { 0x585083c4, 0xff98d16f, 0xff98d228 } +0xff98d228->Object::~Object { 0x585083c4, 0xff98d228 } +Entering state 10 +Stack now 0 10 Reading a token -0xffc7096f->Object::Object { 0x57f443c4 } -0xffc70a18->Object::Object { 0x57f443c4, 0xffc7096f } -0xffc7096f->Object::~Object { 0x57f443c4, 0xffc7096f, 0xffc70a18 } -Next token is token 'a' (0xffc70a18 'a') -0xffc70968->Object::Object { 0x57f443c4, 0xffc70a18 } -0xffc70a18->Object::~Object { 0x57f443c4, 0xffc70968, 0xffc70a18 } -Shifting token 'a' (0xffc70968 'a') -0x57f443d4->Object::Object { 0x57f443c4, 0xffc70968 } -0xffc70968->Object::~Object { 0x57f443c4, 0x57f443d4, 0xffc70968 } -Entering state 2 -Stack now 0 11 2 -0xffc70a28->Object::Object { 0x57f443c4, 0x57f443d4 } +0xff98d16f->Object::Object { 0x585083c4 } +0xff98d218->Object::Object { 0x585083c4, 0xff98d16f } +0xff98d16f->Object::~Object { 0x585083c4, 0xff98d16f, 0xff98d218 } +Next token is token 'a' (0xff98d218 'a') +0xff98d180->Object::Object { 0x585083c4, 0xff98d218 } +0xff98d10b->Object::Object { 0x585083c4, 0xff98d180, 0xff98d218 } +0xff98d10b->Object::~Object { 0x585083c4, 0xff98d10b, 0xff98d180, 0xff98d218 } +0xff98d218->Object::~Object { 0x585083c4, 0xff98d180, 0xff98d218 } +Shifting token 'a' (0xff98d180 'a') +0x585083d4->Object::Object { 0x585083c4, 0xff98d180 } +0xff98d10f->Object::Object { 0x585083c4, 0x585083d4, 0xff98d180 } +0xff98d10f->Object::~Object { 0x585083c4, 0x585083d4, 0xff98d10f, 0xff98d180 } +0xff98d180->Object::~Object { 0x585083c4, 0x585083d4, 0xff98d180 } +Entering state 1 +Stack now 0 10 1 +0xff98d228->Object::Object { 0x585083c4, 0x585083d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57f443d4 'a') --> $$ = nterm item (0xffc70a28 'a') -0x57f443d4->Object::~Object { 0x57f443c4, 0x57f443d4, 0xffc70a28 } -0x57f443d4->Object::Object { 0x57f443c4, 0xffc70a28 } -0xffc70a28->Object::~Object { 0x57f443c4, 0x57f443d4, 0xffc70a28 } -Entering state 11 -Stack now 0 11 11 + $1 = token 'a' (0x585083d4 'a') +-> $$ = nterm item (0xff98d228 'a') +0x585083d4->Object::~Object { 0x585083c4, 0x585083d4, 0xff98d228 } +0x585083d4->Object::Object { 0x585083c4, 0xff98d228 } +0xff98d16f->Object::Object { 0x585083c4, 0x585083d4, 0xff98d228 } 
+0xff98d16f->Object::~Object { 0x585083c4, 0x585083d4, 0xff98d16f, 0xff98d228 } +0xff98d228->Object::~Object { 0x585083c4, 0x585083d4, 0xff98d228 } +Entering state 10 +Stack now 0 10 10 Reading a token -0xffc7096f->Object::Object { 0x57f443c4, 0x57f443d4 } -0xffc70a18->Object::Object { 0x57f443c4, 0x57f443d4, 0xffc7096f } -0xffc7096f->Object::~Object { 0x57f443c4, 0x57f443d4, 0xffc7096f, 0xffc70a18 } -Next token is token 'a' (0xffc70a18 'a') -0xffc70968->Object::Object { 0x57f443c4, 0x57f443d4, 0xffc70a18 } -0xffc70a18->Object::~Object { 0x57f443c4, 0x57f443d4, 0xffc70968, 0xffc70a18 } -Shifting token 'a' (0xffc70968 'a') -0x57f443e4->Object::Object { 0x57f443c4, 0x57f443d4, 0xffc70968 } -0xffc70968->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc70968 } -Entering state 2 -Stack now 0 11 11 2 -0xffc70a28->Object::Object { 0x57f443c4, 0x57f443d4, 0x57f443e4 } +0xff98d16f->Object::Object { 0x585083c4, 0x585083d4 } +0xff98d218->Object::Object { 0x585083c4, 0x585083d4, 0xff98d16f } +0xff98d16f->Object::~Object { 0x585083c4, 0x585083d4, 0xff98d16f, 0xff98d218 } +Next token is token 'a' (0xff98d218 'a') +0xff98d180->Object::Object { 0x585083c4, 0x585083d4, 0xff98d218 } +0xff98d10b->Object::Object { 0x585083c4, 0x585083d4, 0xff98d180, 0xff98d218 } +0xff98d10b->Object::~Object { 0x585083c4, 0x585083d4, 0xff98d10b, 0xff98d180, 0xff98d218 } +0xff98d218->Object::~Object { 0x585083c4, 0x585083d4, 0xff98d180, 0xff98d218 } +Shifting token 'a' (0xff98d180 'a') +0x585083e4->Object::Object { 0x585083c4, 0x585083d4, 0xff98d180 } +0xff98d10f->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d180 } +0xff98d10f->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d10f, 0xff98d180 } +0xff98d180->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d180 } +Entering state 1 +Stack now 0 10 10 1 +0xff98d228->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57f443e4 'a') --> $$ = nterm item (0xffc70a28 'a') -0x57f443e4->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc70a28 } -0x57f443e4->Object::Object { 0x57f443c4, 0x57f443d4, 0xffc70a28 } -0xffc70a28->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc70a28 } -Entering state 11 -Stack now 0 11 11 11 + $1 = token 'a' (0x585083e4 'a') +-> $$ = nterm item (0xff98d228 'a') +0x585083e4->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d228 } +0x585083e4->Object::Object { 0x585083c4, 0x585083d4, 0xff98d228 } +0xff98d16f->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d228 } +0xff98d16f->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d16f, 0xff98d228 } +0xff98d228->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d228 } +Entering state 10 +Stack now 0 10 10 10 Reading a token -0xffc7096f->Object::Object { 0x57f443c4, 0x57f443d4, 0x57f443e4 } -0xffc70a18->Object::Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc7096f } -0xffc7096f->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc7096f, 0xffc70a18 } -Next token is token 'a' (0xffc70a18 'a') -0xffc70968->Object::Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc70a18 } -0xffc70a18->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc70968, 0xffc70a18 } -Shifting token 'a' (0xffc70968 'a') -0x57f443f4->Object::Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc70968 } -0xffc70968->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0x57f443f4, 0xffc70968 } -Entering state 2 -Stack now 0 11 11 11 2 -0xffc70a28->Object::Object { 0x57f443c4, 
0x57f443d4, 0x57f443e4, 0x57f443f4 } +0xff98d16f->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4 } +0xff98d218->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d16f } +0xff98d16f->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d16f, 0xff98d218 } +Next token is token 'a' (0xff98d218 'a') +0xff98d180->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d218 } +0xff98d10b->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d180, 0xff98d218 } +0xff98d10b->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d10b, 0xff98d180, 0xff98d218 } +0xff98d218->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d180, 0xff98d218 } +Shifting token 'a' (0xff98d180 'a') +0x585083f4->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d180 } +0xff98d10f->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4, 0xff98d180 } +0xff98d10f->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4, 0xff98d10f, 0xff98d180 } +0xff98d180->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4, 0xff98d180 } +Entering state 1 +Stack now 0 10 10 10 1 +0xff98d228->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57f443f4 'a') --> $$ = nterm item (0xffc70a28 'a') -0x57f443f4->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0x57f443f4, 0xffc70a28 } -0x57f443f4->Object::Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc70a28 } -0xffc70a28->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0x57f443f4, 0xffc70a28 } -Entering state 11 -Stack now 0 11 11 11 11 + $1 = token 'a' (0x585083f4 'a') +-> $$ = nterm item (0xff98d228 'a') +0x585083f4->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4, 0xff98d228 } +0x585083f4->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d228 } +0xff98d16f->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4, 0xff98d228 } +0xff98d16f->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4, 0xff98d16f, 0xff98d228 } +0xff98d228->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4, 0xff98d228 } +Entering state 10 +Stack now 0 10 10 10 10 Reading a token -0xffc7096f->Object::Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0x57f443f4 } -0xffc70a18->Object::Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0x57f443f4, 0xffc7096f } -0xffc7096f->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0x57f443f4, 0xffc7096f, 0xffc70a18 } -Next token is token 'p' (0xffc70a18 'p'Exception caught: cleaning lookahead and stack -0x57f443f4->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0x57f443f4, 0xffc70a18 } -0x57f443e4->Object::~Object { 0x57f443c4, 0x57f443d4, 0x57f443e4, 0xffc70a18 } -0x57f443d4->Object::~Object { 0x57f443c4, 0x57f443d4, 0xffc70a18 } -0x57f443c4->Object::~Object { 0x57f443c4, 0xffc70a18 } -0xffc70a18->Object::~Object { 0xffc70a18 } +0xff98d16f->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4 } +0xff98d218->Object::Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4, 0xff98d16f } +0xff98d16f->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4, 0xff98d16f, 0xff98d218 } +Next token is token 'p' (0xff98d218 'p'Exception caught: cleaning lookahead and stack +0x585083f4->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0x585083f4, 0xff98d218 } +0x585083e4->Object::~Object { 0x585083c4, 0x585083d4, 0x585083e4, 0xff98d218 } +0x585083d4->Object::~Object { 0x585083c4, 0x585083d4, 0xff98d218 } +0x585083c4->Object::~Object { 
0x585083c4, 0xff98d218 } +0xff98d218->Object::~Object { 0xff98d218 } exception caught: printer end { } -./c++.at:1362: grep '^exception caught: printer$' stderr +./c++.at:1363: grep '^exception caught: printer$' stderr stdout: exception caught: printer -./c++.at:1362: $PREPARSER ./input aaaae +./c++.at:1363: $PREPARSER ./input aaaae stderr: exception caught: syntax error -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaE +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:1463: sed -ne '/INCLUDED/p;/\\file/{p;n;p;}' include/ast/loc.hh +./c++.at:1363: $PREPARSER ./input aaaaE stderr: exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaT +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1471: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o x2.cc x2.yy +./c++.at:1361: $PREPARSER ./input aaaaT +./c++.at:1363: $PREPARSER ./input aaaaT stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaR stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1471: $CXX $CPPFLAGS $CXXFLAGS -Iinclude -c -o x2.o x2.cc +./c++.at:1363: $PREPARSER ./input aaaaR +./c++.at:1361: $PREPARSER ./input aaaaR +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: stderr: stdout: -./c++.at:1555: $PREPARSER ./test +./c++.at:850: $PREPARSER ./input +stdout: +======== Testing with C++ standard flags: '' stderr: -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:1555: ./check -./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./c++.at:573: $here/modern +stdout: +Modern C++: 201703 +./c++.at:573: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:573: 
sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +stdout: +./c++.at:574: $here/modern +stdout: +Modern C++: 201703 +./c++.at:574: $PREPARSER ./list +stderr: +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +stdout: +./c++.at:856: $PREPARSER ./input +stderr: +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +683. c++.at:1065: ok + +stderr: +stdout: +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./c++.at:1360: $PREPARSER ./input aaaas stderr: exception caught: reduction ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +691. c++.at:1517: testing Default action ... +======== Testing with C++ standard flags: '' +682. 
c++.at:1064: ok ./c++.at:1360: $PREPARSER ./input aaaal +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: exception caught: yylex ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ./c++.at:1360: $PREPARSER ./input i stderr: exception caught: initial-action ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: ./c++.at:1360: $PREPARSER ./input aaaap +======== Testing with C++ standard flags: '' +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input --debug aaaap @@ -256858,125 +254648,134 @@ Entering state 0 Stack now 0 Reading a token -0x57fbda00->Object::Object { } -Next token is token 'a' (0x57fbda00 'a') -Shifting token 'a' (0x57fbda00 'a') +0x56c3da00->Object::Object { } +Next token is token 'a' (0x56c3da00 'a') +Shifting token 'a' (0x56c3da00 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57fbda00 'a') --> $$ = nterm item (0x57fbda00 'a') + $1 = token 'a' (0x56c3da00 'a') +-> $$ = nterm item (0x56c3da00 'a') Entering state 11 Stack now 0 11 Reading a token -0x57fbda30->Object::Object { 0x57fbda00 } -Next token is token 'a' (0x57fbda30 'a') -Shifting token 'a' (0x57fbda30 'a') +0x56c3da30->Object::Object { 0x56c3da00 } +Next token is token 'a' (0x56c3da30 'a') +Shifting token 'a' (0x56c3da30 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57fbda30 'a') --> $$ = nterm item (0x57fbda30 'a') + $1 = token 'a' (0x56c3da30 'a') +-> $$ = nterm item (0x56c3da30 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x57fbda60->Object::Object { 0x57fbda00, 0x57fbda30 } -Next token is token 'a' (0x57fbda60 'a') -Shifting token 'a' (0x57fbda60 'a') +0x56c3da60->Object::Object { 0x56c3da00, 0x56c3da30 } +Next token is token 'a' (0x56c3da60 'a') +Shifting token 'a' (0x56c3da60 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57fbda60 'a') --> $$ = nterm item (0x57fbda60 'a') + $1 = token 'a' (0x56c3da60 'a') +-> $$ = nterm item (0x56c3da60 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x57fbda90->Object::Object { 0x57fbda00, 0x57fbda30, 0x57fbda60 } -Next token is token 'a' (0x57fbda90 'a') -Shifting token 'a' (0x57fbda90 'a') +0x56c3da90->Object::Object { 0x56c3da00, 0x56c3da30, 0x56c3da60 } +Next token is token 'a' (0x56c3da90 'a') +Shifting token 'a' (0x56c3da90 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57fbda90 'a') --> $$ = nterm item (0x57fbda90 'a') + $1 = token 'a' (0x56c3da90 'a') +-> $$ = nterm item (0x56c3da90 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x57fbdac0->Object::Object { 0x57fbda00, 0x57fbda30, 0x57fbda60, 0x57fbda90 } -Next token is token 'p' (0x57fbdac0 'p'Exception caught: cleaning lookahead and stack -0x57fbdac0->Object::~Object { 0x57fbda00, 0x57fbda30, 0x57fbda60, 0x57fbda90, 0x57fbdac0 } -0x57fbda90->Object::~Object { 0x57fbda00, 0x57fbda30, 0x57fbda60, 0x57fbda90 } -0x57fbda60->Object::~Object { 0x57fbda00, 0x57fbda30, 0x57fbda60 } -0x57fbda30->Object::~Object { 0x57fbda00, 0x57fbda30 } -0x57fbda00->Object::~Object { 0x57fbda00 } +0x56c3dac0->Object::Object { 0x56c3da00, 0x56c3da30, 0x56c3da60, 0x56c3da90 } +Next token is token 'p' (0x56c3dac0 'p'Exception caught: cleaning lookahead 
and stack +0x56c3dac0->Object::~Object { 0x56c3da00, 0x56c3da30, 0x56c3da60, 0x56c3da90, 0x56c3dac0 } +0x56c3da90->Object::~Object { 0x56c3da00, 0x56c3da30, 0x56c3da60, 0x56c3da90 } +0x56c3da60->Object::~Object { 0x56c3da00, 0x56c3da30, 0x56c3da60 } +0x56c3da30->Object::~Object { 0x56c3da00, 0x56c3da30 } +0x56c3da00->Object::~Object { 0x56c3da00 } exception caught: printer end { } ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stdout: +./c++.at:1555: ./check +./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x57fbda00->Object::Object { } -Next token is token 'a' (0x57fbda00 'a') -Shifting token 'a' (0x57fbda00 'a') +0x56c3da00->Object::Object { } +Next token is token 'a' (0x56c3da00 'a') +Shifting token 'a' (0x56c3da00 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57fbda00 'a') --> $$ = nterm item (0x57fbda00 'a') + $1 = token 'a' (0x56c3da00 'a') +-> $$ = nterm item (0x56c3da00 'a') Entering state 11 Stack now 0 11 Reading a token -0x57fbda30->Object::Object { 0x57fbda00 } -Next token is token 'a' (0x57fbda30 'a') -Shifting token 'a' (0x57fbda30 'a') +0x56c3da30->Object::Object { 0x56c3da00 } +Next token is token 'a' (0x56c3da30 'a') +Shifting token 'a' (0x56c3da30 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57fbda30 'a') --> $$ = nterm item (0x57fbda30 'a') + $1 = token 'a' (0x56c3da30 'a') +-> $$ = nterm item (0x56c3da30 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x57fbda60->Object::Object { 0x57fbda00, 0x57fbda30 } -Next token is token 'a' (0x57fbda60 'a') -Shifting token 'a' (0x57fbda60 'a') +0x56c3da60->Object::Object { 0x56c3da00, 0x56c3da30 } +Next token is token 'a' (0x56c3da60 'a') +Shifting token 'a' (0x56c3da60 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57fbda60 'a') --> $$ = nterm item (0x57fbda60 'a') + $1 = token 'a' (0x56c3da60 'a') +-> $$ = nterm item (0x56c3da60 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x57fbda90->Object::Object { 0x57fbda00, 0x57fbda30, 0x57fbda60 } -Next token is token 'a' (0x57fbda90 'a') -Shifting token 'a' (0x57fbda90 'a') +0x56c3da90->Object::Object { 0x56c3da00, 0x56c3da30, 0x56c3da60 } +Next token is token 'a' (0x56c3da90 'a') +Shifting token 'a' (0x56c3da90 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x57fbda90 'a') --> $$ = nterm item (0x57fbda90 'a') + $1 = token 'a' (0x56c3da90 'a') +-> $$ = nterm item (0x56c3da90 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x57fbdac0->Object::Object { 0x57fbda00, 0x57fbda30, 0x57fbda60, 0x57fbda90 } -Next token is token 'p' (0x57fbdac0 'p'Exception caught: cleaning lookahead and stack -0x57fbdac0->Object::~Object { 0x57fbda00, 0x57fbda30, 0x57fbda60, 0x57fbda90, 0x57fbdac0 } -0x57fbda90->Object::~Object { 0x57fbda00, 0x57fbda30, 0x57fbda60, 0x57fbda90 } -0x57fbda60->Object::~Object { 0x57fbda00, 0x57fbda30, 0x57fbda60 } -0x57fbda30->Object::~Object { 0x57fbda00, 0x57fbda30 } -0x57fbda00->Object::~Object { 0x57fbda00 } +0x56c3dac0->Object::Object { 0x56c3da00, 0x56c3da30, 0x56c3da60, 0x56c3da90 } +Next token is token 'p' (0x56c3dac0 'p'Exception caught: cleaning lookahead and stack +0x56c3dac0->Object::~Object { 0x56c3da00, 0x56c3da30, 
0x56c3da60, 0x56c3da90, 0x56c3dac0 } +0x56c3da90->Object::~Object { 0x56c3da00, 0x56c3da30, 0x56c3da60, 0x56c3da90 } +0x56c3da60->Object::~Object { 0x56c3da00, 0x56c3da30, 0x56c3da60 } +0x56c3da30->Object::~Object { 0x56c3da00, 0x56c3da30 } +0x56c3da00->Object::~Object { 0x56c3da00 } exception caught: printer end { } ./c++.at:1360: grep '^exception caught: printer$' stderr +692. java.at:25: testing Java invalid directives ... +./java.at:35: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret YYParser.y stdout: exception caught: printer ./c++.at:1360: $PREPARSER ./input aaaae stderr: exception caught: syntax error ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ./c++.at:1360: $PREPARSER ./input aaaaE stderr: exception caught: syntax error, unexpected end of file, expecting 'a' @@ -256985,812 +254784,998 @@ stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input aaaaR +./java.at:50: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret YYParser.y +stderr: stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -685. c++.at:1360: ok - -692. java.at:25: testing Java invalid directives ... -./java.at:35: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret YYParser.y -./java.at:50: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret YYParser.y +stdout: +======== Testing with C++ standard flags: '' +./c++.at:1501: $CXX $CPPFLAGS $CXXFLAGS -Iinclude $LDFLAGS -o parser x[12].o main.cc $LIBS +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS 692. java.at:25: ok + +693. java.at:186: testing Java parser class and package names ... +./java.at:188: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./java.at:188: grep '[mb]4_' YYParser.y +stdout: +693. java.at:186: skipped (java.at:188) stderr: +stdout: +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +694. java.at:217: testing Java parser class modifiers ... +./java.at:219: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +stderr: +stdout: +./c++.at:850: $PREPARSER ./input +stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./java.at:219: grep '[mb]4_' YYParser.y stdout: +./c++.at:851: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +694. 
java.at:217: skipped (java.at:219) ======== Testing with C++ standard flags: '' -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS + +695. java.at:287: testing Java parser class extends and implements ... +./java.at:289: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +stderr: stderr: stdout: -./c++.at:858: $PREPARSER ./input +stdout: +./c++.at:568: $here/modern +./c++.at:856: $PREPARSER ./input +stdout: stderr: +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Modern C++: 202002 +./c++.at:568: $PREPARSER ./list stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:857: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./java.at:289: grep '[mb]4_' YYParser.y +stdout: +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +695. java.at:287: ======== Testing with C++ standard flags: '' +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y + skipped (java.at:289) + ======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: stdout: -./c++.at:1363: $PREPARSER ./input aaaas +./c++.at:1502: $PREPARSER ./parser +stderr: +./c++.at:1502: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +690. c++.at:1422: ok +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS + +696. java.at:307: testing Java %parse-param and %lex-param ... +./java.at:309: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./java.at:309: grep '[mb]4_' YYParser.y +stdout: +696. java.at:307: stderr: +stdout: + skipped (java.at:309) +./c++.at:1361: $PREPARSER ./input aaaas stderr: exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaal +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaal +697. java.at:381: testing Java throws specifications ... stderr: exception caught: yylex -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -693. java.at:186: testing Java parser class and package names ... -./java.at:188: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -./c++.at:1363: $PREPARSER ./input i -./java.at:188: grep '[mb]4_' YYParser.y -stdout: -693. 
java.at:186: stderr: -exception caught: initial-action - skipped (java.at:188) -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./java.at:441: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -./c++.at:1363: $PREPARSER ./input aaaap +./c++.at:1361: $PREPARSER ./input i stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input --debug aaaap +exception caught: initial-action +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./java.at:441: grep '[mb]4_' YYParser.y +stdout: +./c++.at:1361: $PREPARSER ./input aaaap +stderr: +697. java.at:381: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input --debug aaaap + skipped (java.at:441) stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xffda6fdf->Object::Object { } -0xffda7088->Object::Object { 0xffda6fdf } -0xffda6fdf->Object::~Object { 0xffda6fdf, 0xffda7088 } -Next token is token 'a' (0xffda7088 'a') -0xffda6ff0->Object::Object { 0xffda7088 } -0xffda6f7b->Object::Object { 0xffda6ff0, 0xffda7088 } -0xffda6f7b->Object::~Object { 0xffda6f7b, 0xffda6ff0, 0xffda7088 } -0xffda7088->Object::~Object { 0xffda6ff0, 0xffda7088 } -Shifting token 'a' (0xffda6ff0 'a') -0x56b373c4->Object::Object { 0xffda6ff0 } -0xffda6f7f->Object::Object { 0x56b373c4, 0xffda6ff0 } -0xffda6f7f->Object::~Object { 0x56b373c4, 0xffda6f7f, 0xffda6ff0 } -0xffda6ff0->Object::~Object { 0x56b373c4, 0xffda6ff0 } +0x57cc9a00->Object::Object { } +Next token is token 'a' (0x57cc9a00 'a') +Shifting token 'a' (0x57cc9a00 'a') Entering state 1 Stack now 0 1 -0xffda7098->Object::Object { 0x56b373c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56b373c4 'a') --> $$ = nterm item (0xffda7098 'a') -0x56b373c4->Object::~Object { 0x56b373c4, 0xffda7098 } -0x56b373c4->Object::Object { 0xffda7098 } -0xffda6fdf->Object::Object { 0x56b373c4, 0xffda7098 } -0xffda6fdf->Object::~Object { 0x56b373c4, 0xffda6fdf, 0xffda7098 } -0xffda7098->Object::~Object { 0x56b373c4, 0xffda7098 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57cc9a00 'a') +-> $$ = nterm item (0x57cc9a00 'a') Entering state 10 Stack now 0 10 Reading a token -0xffda6fdf->Object::Object { 0x56b373c4 } -0xffda7088->Object::Object { 0x56b373c4, 0xffda6fdf } -0xffda6fdf->Object::~Object { 0x56b373c4, 0xffda6fdf, 0xffda7088 } -Next token is token 'a' (0xffda7088 'a') -0xffda6ff0->Object::Object { 0x56b373c4, 0xffda7088 } -0xffda6f7b->Object::Object { 0x56b373c4, 0xffda6ff0, 0xffda7088 } -0xffda6f7b->Object::~Object { 0x56b373c4, 0xffda6f7b, 0xffda6ff0, 0xffda7088 } -0xffda7088->Object::~Object { 0x56b373c4, 0xffda6ff0, 0xffda7088 } -Shifting token 'a' (0xffda6ff0 'a') -0x56b373d4->Object::Object { 0x56b373c4, 0xffda6ff0 } -0xffda6f7f->Object::Object { 0x56b373c4, 0x56b373d4, 0xffda6ff0 } -0xffda6f7f->Object::~Object { 0x56b373c4, 0x56b373d4, 0xffda6f7f, 0xffda6ff0 } -0xffda6ff0->Object::~Object { 0x56b373c4, 0x56b373d4, 0xffda6ff0 } +0x57cc9a30->Object::Object { 0x57cc9a00 } +Next token is token 'a' (0x57cc9a30 'a') +Shifting token 'a' (0x57cc9a30 'a') Entering state 1 Stack now 0 10 1 -0xffda7098->Object::Object { 0x56b373c4, 0x56b373d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56b373d4 'a') --> $$ = nterm item (0xffda7098 
'a') -0x56b373d4->Object::~Object { 0x56b373c4, 0x56b373d4, 0xffda7098 } -0x56b373d4->Object::Object { 0x56b373c4, 0xffda7098 } -0xffda6fdf->Object::Object { 0x56b373c4, 0x56b373d4, 0xffda7098 } -0xffda6fdf->Object::~Object { 0x56b373c4, 0x56b373d4, 0xffda6fdf, 0xffda7098 } -0xffda7098->Object::~Object { 0x56b373c4, 0x56b373d4, 0xffda7098 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57cc9a30 'a') +-> $$ = nterm item (0x57cc9a30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0xffda6fdf->Object::Object { 0x56b373c4, 0x56b373d4 } -0xffda7088->Object::Object { 0x56b373c4, 0x56b373d4, 0xffda6fdf } -0xffda6fdf->Object::~Object { 0x56b373c4, 0x56b373d4, 0xffda6fdf, 0xffda7088 } -Next token is token 'a' (0xffda7088 'a') -0xffda6ff0->Object::Object { 0x56b373c4, 0x56b373d4, 0xffda7088 } -0xffda6f7b->Object::Object { 0x56b373c4, 0x56b373d4, 0xffda6ff0, 0xffda7088 } -0xffda6f7b->Object::~Object { 0x56b373c4, 0x56b373d4, 0xffda6f7b, 0xffda6ff0, 0xffda7088 } -0xffda7088->Object::~Object { 0x56b373c4, 0x56b373d4, 0xffda6ff0, 0xffda7088 } -Shifting token 'a' (0xffda6ff0 'a') -0x56b373e4->Object::Object { 0x56b373c4, 0x56b373d4, 0xffda6ff0 } -0xffda6f7f->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6ff0 } -0xffda6f7f->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6f7f, 0xffda6ff0 } -0xffda6ff0->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6ff0 } +0x57cc9a60->Object::Object { 0x57cc9a00, 0x57cc9a30 } +Next token is token 'a' (0x57cc9a60 'a') +Shifting token 'a' (0x57cc9a60 'a') Entering state 1 Stack now 0 10 10 1 -0xffda7098->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56b373e4 'a') --> $$ = nterm item (0xffda7098 'a') -0x56b373e4->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda7098 } -0x56b373e4->Object::Object { 0x56b373c4, 0x56b373d4, 0xffda7098 } -0xffda6fdf->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda7098 } -0xffda6fdf->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6fdf, 0xffda7098 } -0xffda7098->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda7098 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57cc9a60 'a') +-> $$ = nterm item (0x57cc9a60 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0xffda6fdf->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4 } -0xffda7088->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6fdf } -0xffda6fdf->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6fdf, 0xffda7088 } -Next token is token 'a' (0xffda7088 'a') -0xffda6ff0->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda7088 } -0xffda6f7b->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6ff0, 0xffda7088 } -0xffda6f7b->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6f7b, 0xffda6ff0, 0xffda7088 } -0xffda7088->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6ff0, 0xffda7088 } -Shifting token 'a' (0xffda6ff0 'a') -0x56b373f4->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6ff0 } -0xffda6f7f->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda6ff0 } -0xffda6f7f->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda6f7f, 0xffda6ff0 } -0xffda6ff0->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda6ff0 } +0x57cc9a90->Object::Object { 0x57cc9a00, 0x57cc9a30, 0x57cc9a60 } +Next token is token 'a' (0x57cc9a90 'a') +Shifting token 'a' (0x57cc9a90 'a') Entering state 1 Stack now 0 
10 10 10 1 -0xffda7098->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56b373f4 'a') --> $$ = nterm item (0xffda7098 'a') -0x56b373f4->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda7098 } -0x56b373f4->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda7098 } -0xffda6fdf->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda7098 } -0xffda6fdf->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda6fdf, 0xffda7098 } -0xffda7098->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda7098 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57cc9a90 'a') +-> $$ = nterm item (0x57cc9a90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0xffda6fdf->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4 } -0xffda7088->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda6fdf } -0xffda6fdf->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda6fdf, 0xffda7088 } -Next token is token 'p' (0xffda7088 'p'Exception caught: cleaning lookahead and stack -0x56b373f4->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda7088 } -0x56b373e4->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda7088 } -0x56b373d4->Object::~Object { 0x56b373c4, 0x56b373d4, 0xffda7088 } -0x56b373c4->Object::~Object { 0x56b373c4, 0xffda7088 } -0xffda7088->Object::~Object { 0xffda7088 } +0x57cc9ac0->Object::Object { 0x57cc9a00, 0x57cc9a30, 0x57cc9a60, 0x57cc9a90 } +Next token is token 'p' (0x57cc9ac0 'p'Exception caught: cleaning lookahead and stack +0x57cc9ac0->Object::~Object { 0x57cc9a00, 0x57cc9a30, 0x57cc9a60, 0x57cc9a90, 0x57cc9ac0 } +0x57cc9a90->Object::~Object { 0x57cc9a00, 0x57cc9a30, 0x57cc9a60, 0x57cc9a90 } +0x57cc9a60->Object::~Object { 0x57cc9a00, 0x57cc9a30, 0x57cc9a60 } +0x57cc9a30->Object::~Object { 0x57cc9a00, 0x57cc9a30 } +0x57cc9a00->Object::~Object { 0x57cc9a00 } exception caught: printer end { } -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + +stdout: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xffda6fdf->Object::Object { } -0xffda7088->Object::Object { 0xffda6fdf } -0xffda6fdf->Object::~Object { 0xffda6fdf, 0xffda7088 } -Next token is token 'a' (0xffda7088 'a') -0xffda6ff0->Object::Object { 0xffda7088 } -0xffda6f7b->Object::Object { 0xffda6ff0, 0xffda7088 } -0xffda6f7b->Object::~Object { 0xffda6f7b, 0xffda6ff0, 0xffda7088 } -0xffda7088->Object::~Object { 0xffda6ff0, 0xffda7088 } -Shifting token 'a' (0xffda6ff0 'a') -0x56b373c4->Object::Object { 0xffda6ff0 } -0xffda6f7f->Object::Object { 0x56b373c4, 0xffda6ff0 } -0xffda6f7f->Object::~Object { 0x56b373c4, 0xffda6f7f, 0xffda6ff0 } -0xffda6ff0->Object::~Object { 0x56b373c4, 0xffda6ff0 } +0x57cc9a00->Object::Object { } +Next token is token 'a' (0x57cc9a00 'a') +Shifting token 'a' (0x57cc9a00 'a') Entering state 1 Stack now 0 1 -0xffda7098->Object::Object { 0x56b373c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56b373c4 'a') --> $$ = nterm item (0xffda7098 'a') -0x56b373c4->Object::~Object { 0x56b373c4, 0xffda7098 } -0x56b373c4->Object::Object { 0xffda7098 } -0xffda6fdf->Object::Object { 0x56b373c4, 0xffda7098 } -0xffda6fdf->Object::~Object { 0x56b373c4, 0xffda6fdf, 0xffda7098 } -0xffda7098->Object::~Object { 0x56b373c4, 0xffda7098 } +Reducing 
stack by rule 4 (line 147): + $1 = token 'a' (0x57cc9a00 'a') +-> $$ = nterm item (0x57cc9a00 'a') Entering state 10 Stack now 0 10 Reading a token -0xffda6fdf->Object::Object { 0x56b373c4 } -0xffda7088->Object::Object { 0x56b373c4, 0xffda6fdf } -0xffda6fdf->Object::~Object { 0x56b373c4, 0xffda6fdf, 0xffda7088 } -Next token is token 'a' (0xffda7088 'a') -0xffda6ff0->Object::Object { 0x56b373c4, 0xffda7088 } -0xffda6f7b->Object::Object { 0x56b373c4, 0xffda6ff0, 0xffda7088 } -0xffda6f7b->Object::~Object { 0x56b373c4, 0xffda6f7b, 0xffda6ff0, 0xffda7088 } -0xffda7088->Object::~Object { 0x56b373c4, 0xffda6ff0, 0xffda7088 } -Shifting token 'a' (0xffda6ff0 'a') -0x56b373d4->Object::Object { 0x56b373c4, 0xffda6ff0 } -0xffda6f7f->Object::Object { 0x56b373c4, 0x56b373d4, 0xffda6ff0 } -0xffda6f7f->Object::~Object { 0x56b373c4, 0x56b373d4, 0xffda6f7f, 0xffda6ff0 } -0xffda6ff0->Object::~Object { 0x56b373c4, 0x56b373d4, 0xffda6ff0 } +0x57cc9a30->Object::Object { 0x57cc9a00 } +Next token is token 'a' (0x57cc9a30 'a') +Shifting token 'a' (0x57cc9a30 'a') Entering state 1 Stack now 0 10 1 -0xffda7098->Object::Object { 0x56b373c4, 0x56b373d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56b373d4 'a') --> $$ = nterm item (0xffda7098 'a') -0x56b373d4->Object::~Object { 0x56b373c4, 0x56b373d4, 0xffda7098 } -0x56b373d4->Object::Object { 0x56b373c4, 0xffda7098 } -0xffda6fdf->Object::Object { 0x56b373c4, 0x56b373d4, 0xffda7098 } -0xffda6fdf->Object::~Object { 0x56b373c4, 0x56b373d4, 0xffda6fdf, 0xffda7098 } -0xffda7098->Object::~Object { 0x56b373c4, 0x56b373d4, 0xffda7098 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57cc9a30 'a') +-> $$ = nterm item (0x57cc9a30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0xffda6fdf->Object::Object { 0x56b373c4, 0x56b373d4 } -0xffda7088->Object::Object { 0x56b373c4, 0x56b373d4, 0xffda6fdf } -0xffda6fdf->Object::~Object { 0x56b373c4, 0x56b373d4, 0xffda6fdf, 0xffda7088 } -Next token is token 'a' (0xffda7088 'a') -0xffda6ff0->Object::Object { 0x56b373c4, 0x56b373d4, 0xffda7088 } -0xffda6f7b->Object::Object { 0x56b373c4, 0x56b373d4, 0xffda6ff0, 0xffda7088 } -0xffda6f7b->Object::~Object { 0x56b373c4, 0x56b373d4, 0xffda6f7b, 0xffda6ff0, 0xffda7088 } -0xffda7088->Object::~Object { 0x56b373c4, 0x56b373d4, 0xffda6ff0, 0xffda7088 } -Shifting token 'a' (0xffda6ff0 'a') -0x56b373e4->Object::Object { 0x56b373c4, 0x56b373d4, 0xffda6ff0 } -0xffda6f7f->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6ff0 } -0xffda6f7f->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6f7f, 0xffda6ff0 } -0xffda6ff0->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6ff0 } +0x57cc9a60->Object::Object { 0x57cc9a00, 0x57cc9a30 } +Next token is token 'a' (0x57cc9a60 'a') +Shifting token 'a' (0x57cc9a60 'a') Entering state 1 Stack now 0 10 10 1 -0xffda7098->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56b373e4 'a') --> $$ = nterm item (0xffda7098 'a') -0x56b373e4->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda7098 } -0x56b373e4->Object::Object { 0x56b373c4, 0x56b373d4, 0xffda7098 } -0xffda6fdf->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda7098 } -0xffda6fdf->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6fdf, 0xffda7098 } -0xffda7098->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda7098 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57cc9a60 'a') +-> $$ = nterm item (0x57cc9a60 'a') Entering state 10 Stack now 0 
10 10 10 Reading a token -0xffda6fdf->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4 } -0xffda7088->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6fdf } -0xffda6fdf->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6fdf, 0xffda7088 } -Next token is token 'a' (0xffda7088 'a') -0xffda6ff0->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda7088 } -0xffda6f7b->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6ff0, 0xffda7088 } -0xffda6f7b->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6f7b, 0xffda6ff0, 0xffda7088 } -0xffda7088->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6ff0, 0xffda7088 } -Shifting token 'a' (0xffda6ff0 'a') -0x56b373f4->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda6ff0 } -0xffda6f7f->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda6ff0 } -0xffda6f7f->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda6f7f, 0xffda6ff0 } -0xffda6ff0->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda6ff0 } +0x57cc9a90->Object::Object { 0x57cc9a00, 0x57cc9a30, 0x57cc9a60 } +Next token is token 'a' (0x57cc9a90 'a') +Shifting token 'a' (0x57cc9a90 'a') Entering state 1 Stack now 0 10 10 10 1 -0xffda7098->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56b373f4 'a') --> $$ = nterm item (0xffda7098 'a') -0x56b373f4->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda7098 } -0x56b373f4->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda7098 } -0xffda6fdf->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda7098 } -0xffda6fdf->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda6fdf, 0xffda7098 } -0xffda7098->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda7098 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57cc9a90 'a') +-> $$ = nterm item (0x57cc9a90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0xffda6fdf->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4 } -0xffda7088->Object::Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda6fdf } -0xffda6fdf->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda6fdf, 0xffda7088 } -Next token is token 'p' (0xffda7088 'p'Exception caught: cleaning lookahead and stack -0x56b373f4->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0x56b373f4, 0xffda7088 } -0x56b373e4->Object::~Object { 0x56b373c4, 0x56b373d4, 0x56b373e4, 0xffda7088 } -0x56b373d4->Object::~Object { 0x56b373c4, 0x56b373d4, 0xffda7088 } -0x56b373c4->Object::~Object { 0x56b373c4, 0xffda7088 } -0xffda7088->Object::~Object { 0xffda7088 } +0x57cc9ac0->Object::Object { 0x57cc9a00, 0x57cc9a30, 0x57cc9a60, 0x57cc9a90 } +Next token is token 'p' (0x57cc9ac0 'p'Exception caught: cleaning lookahead and stack +0x57cc9ac0->Object::~Object { 0x57cc9a00, 0x57cc9a30, 0x57cc9a60, 0x57cc9a90, 0x57cc9ac0 } +0x57cc9a90->Object::~Object { 0x57cc9a00, 0x57cc9a30, 0x57cc9a60, 0x57cc9a90 } +0x57cc9a60->Object::~Object { 0x57cc9a00, 0x57cc9a30, 0x57cc9a60 } +0x57cc9a30->Object::~Object { 0x57cc9a00, 0x57cc9a30 } +0x57cc9a00->Object::~Object { 0x57cc9a00 } exception caught: printer end { } -./c++.at:1363: grep '^exception caught: printer$' stderr -694. java.at:217: testing Java parser class modifiers ... 
-./java.at:219: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./c++.at:1361: grep '^exception caught: printer$' stderr +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stdout: exception caught: printer -./c++.at:1363: $PREPARSER ./input aaaae +./c++.at:1361: $PREPARSER ./input aaaae stderr: exception caught: syntax error -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaE +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaE +698. java.at:470: testing Java constructor init and init_throws ... +./java.at:475: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y stderr: exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./java.at:219: grep '[mb]4_' YYParser.y +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: -./c++.at:1363: $PREPARSER ./input aaaaT -694. java.at:217: stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - skipped (java.at:219) -./c++.at:1363: $PREPARSER ./input aaaaR +======== Testing with C++ standard flags: '' +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1361: $PREPARSER ./input aaaaT stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaR +stderr: +stderr: +stdout: +./c++.at:567: $here/modern +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -695. java.at:287: testing Java parser class extends and implements ... -./java.at:289: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -./java.at:289: grep '[mb]4_' YYParser.y stdout: -695. java.at:287: skipped (java.at:289) +Modern C++: 202002 +./c++.at:567: $PREPARSER ./list +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./java.at:475: grep '[mb]4_' YYParser.y +stdout: +======== Testing with C++ standard flags: '' +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +698. java.at:470: skipped (java.at:475) +699. java.at:497: testing Java value, position, and location types ... +./java.at:499: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -696. java.at:307: testing Java %parse-param and %lex-param ... 
-./java.at:309: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -./java.at:309: grep '[mb]4_' YYParser.y +./java.at:499: grep '[mb]4_' YYParser.y stdout: -696. java.at:307: skipped (java.at:309) +699. java.at:497: skipped (java.at:499) +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +700. java.at:528: testing Java syntax error handling without error token ... +./java.at:579: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret YYParser.y stderr: stdout: -./c++.at:1361: $PREPARSER ./input aaaas +./c++.at:1363: $PREPARSER ./input aaaas stderr: exception caught: reduction -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaal +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaal +stderr: +stdout: +./c++.at:570: $here/modern stderr: exception caught: yylex -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input i +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +Modern C++: 202002 +./c++.at:570: $PREPARSER ./list +./c++.at:1363: $PREPARSER ./input i +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) stderr: exception caught: initial-action -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaap +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaap +======== Testing with C++ standard flags: '' +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -697. java.at:381: testing Java throws specifications ... -./java.at:441: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -./c++.at:1361: $PREPARSER ./input --debug aaaap +701. javapush.at:172: testing Trivial Push Parser with api.push-pull verification ... +700. 
java.at:528: ./javapush.at:181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=pull -o Main.java input.y + skipped (java.at:580) +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input --debug aaaap + stderr: +stdout: +stderr: +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS Starting parse Entering state 0 Stack now 0 Reading a token -0x574c1a00->Object::Object { } -Next token is token 'a' (0x574c1a00 'a') -Shifting token 'a' (0x574c1a00 'a') +0xffe58d8f->Object::Object { } +0xffe58e38->Object::Object { 0xffe58d8f } +0xffe58d8f->Object::~Object { 0xffe58d8f, 0xffe58e38 } +Next token is token 'a' (0xffe58e38 'a') +0xffe58da0->Object::Object { 0xffe58e38 } +0xffe58d2b->Object::Object { 0xffe58da0, 0xffe58e38 } +0xffe58d2b->Object::~Object { 0xffe58d2b, 0xffe58da0, 0xffe58e38 } +0xffe58e38->Object::~Object { 0xffe58da0, 0xffe58e38 } +Shifting token 'a' (0xffe58da0 'a') +0x566803c4->Object::Object { 0xffe58da0 } +0xffe58d2f->Object::Object { 0x566803c4, 0xffe58da0 } +0xffe58d2f->Object::~Object { 0x566803c4, 0xffe58d2f, 0xffe58da0 } +0xffe58da0->Object::~Object { 0x566803c4, 0xffe58da0 } Entering state 1 Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x574c1a00 'a') --> $$ = nterm item (0x574c1a00 'a') +0xffe58e48->Object::Object { 0x566803c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x566803c4 'a') +-> $$ = nterm item (0xffe58e48 'a') +0x566803c4->Object::~Object { 0x566803c4, 0xffe58e48 } +0x566803c4->Object::Object { 0xffe58e48 } +0xffe58d8f->Object::Object { 0x566803c4, 0xffe58e48 } +0xffe58d8f->Object::~Object { 0x566803c4, 0xffe58d8f, 0xffe58e48 } +0xffe58e48->Object::~Object { 0x566803c4, 0xffe58e48 } Entering state 10 Stack now 0 10 Reading a token -0x574c1a30->Object::Object { 0x574c1a00 } -Next token is token 'a' (0x574c1a30 'a') -Shifting token 'a' (0x574c1a30 'a') +0xffe58d8f->Object::Object { 0x566803c4 } +0xffe58e38->Object::Object { 0x566803c4, 0xffe58d8f } +0xffe58d8f->Object::~Object { 0x566803c4, 0xffe58d8f, 0xffe58e38 } +Next token is token 'a' (0xffe58e38 'a') +0xffe58da0->Object::Object { 0x566803c4, 0xffe58e38 } +0xffe58d2b->Object::Object { 0x566803c4, 0xffe58da0, 0xffe58e38 } +0xffe58d2b->Object::~Object { 0x566803c4, 0xffe58d2b, 0xffe58da0, 0xffe58e38 } +0xffe58e38->Object::~Object { 0x566803c4, 0xffe58da0, 0xffe58e38 } +Shifting token 'a' (0xffe58da0 'a') +0x566803d4->Object::Object { 0x566803c4, 0xffe58da0 } +0xffe58d2f->Object::Object { 0x566803c4, 0x566803d4, 0xffe58da0 } +0xffe58d2f->Object::~Object { 0x566803c4, 0x566803d4, 0xffe58d2f, 0xffe58da0 } +0xffe58da0->Object::~Object { 0x566803c4, 0x566803d4, 0xffe58da0 } Entering state 1 Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x574c1a30 'a') --> $$ = nterm item (0x574c1a30 'a') +0xffe58e48->Object::Object { 0x566803c4, 0x566803d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x566803d4 'a') +-> $$ = nterm item (0xffe58e48 'a') +0x566803d4->Object::~Object { 0x566803c4, 0x566803d4, 0xffe58e48 } +0x566803d4->Object::Object { 0x566803c4, 0xffe58e48 } +0xffe58d8f->Object::Object { 0x566803c4, 0x566803d4, 0xffe58e48 } +0xffe58d8f->Object::~Object { 0x566803c4, 0x566803d4, 0xffe58d8f, 0xffe58e48 } +0xffe58e48->Object::~Object { 0x566803c4, 0x566803d4, 0xffe58e48 } Entering state 10 Stack now 0 10 10 Reading a token -0x574c1a60->Object::Object { 0x574c1a00, 0x574c1a30 } -Next token 
is token 'a' (0x574c1a60 'a') -Shifting token 'a' (0x574c1a60 'a') +0xffe58d8f->Object::Object { 0x566803c4, 0x566803d4 } +0xffe58e38->Object::Object { 0x566803c4, 0x566803d4, 0xffe58d8f } +0xffe58d8f->Object::~Object { 0x566803c4, 0x566803d4, 0xffe58d8f, 0xffe58e38 } +Next token is token 'a' (0xffe58e38 'a') +0xffe58da0->Object::Object { 0x566803c4, 0x566803d4, 0xffe58e38 } +0xffe58d2b->Object::Object { 0x566803c4, 0x566803d4, 0xffe58da0, 0xffe58e38 } +0xffe58d2b->Object::~Object { 0x566803c4, 0x566803d4, 0xffe58d2b, 0xffe58da0, 0xffe58e38 } +0xffe58e38->Object::~Object { 0x566803c4, 0x566803d4, 0xffe58da0, 0xffe58e38 } +Shifting token 'a' (0xffe58da0 'a') +0x566803e4->Object::Object { 0x566803c4, 0x566803d4, 0xffe58da0 } +0xffe58d2f->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58da0 } +0xffe58d2f->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58d2f, 0xffe58da0 } +0xffe58da0->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58da0 } Entering state 1 Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x574c1a60 'a') --> $$ = nterm item (0x574c1a60 'a') +0xffe58e48->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x566803e4 'a') +-> $$ = nterm item (0xffe58e48 'a') +0x566803e4->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58e48 } +0x566803e4->Object::Object { 0x566803c4, 0x566803d4, 0xffe58e48 } +0xffe58d8f->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58e48 } +0xffe58d8f->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58d8f, 0xffe58e48 } +0xffe58e48->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58e48 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x574c1a90->Object::Object { 0x574c1a00, 0x574c1a30, 0x574c1a60 } -Next token is token 'a' (0x574c1a90 'a') -Shifting token 'a' (0x574c1a90 'a') +0xffe58d8f->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4 } +0xffe58e38->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58d8f } +0xffe58d8f->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58d8f, 0xffe58e38 } +Next token is token 'a' (0xffe58e38 'a') +0xffe58da0->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58e38 } +0xffe58d2b->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58da0, 0xffe58e38 } +0xffe58d2b->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58d2b, 0xffe58da0, 0xffe58e38 } +0xffe58e38->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58da0, 0xffe58e38 } +Shifting token 'a' (0xffe58da0 'a') +0x566803f4->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58da0 } +0xffe58d2f->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58da0 } +0xffe58d2f->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58d2f, 0xffe58da0 } +0xffe58da0->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58da0 } Entering state 1 Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x574c1a90 'a') --> $$ = nterm item (0x574c1a90 'a') +0xffe58e48->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x566803f4 'a') +-> $$ = nterm item (0xffe58e48 'a') +0x566803f4->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58e48 } +0x566803f4->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58e48 } +0xffe58d8f->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58e48 } 
+0xffe58d8f->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58d8f, 0xffe58e48 } +0xffe58e48->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58e48 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x574c1ac0->Object::Object { 0x574c1a00, 0x574c1a30, 0x574c1a60, 0x574c1a90 } -Next token is token 'p' (0x574c1ac0 'p'Exception caught: cleaning lookahead and stack -0x574c1ac0->Object::~Object { 0x574c1a00, 0x574c1a30, 0x574c1a60, 0x574c1a90, 0x574c1ac0 } -0x574c1a90->Object::~Object { 0x574c1a00, 0x574c1a30, 0x574c1a60, 0x574c1a90 } -0x574c1a60->Object::~Object { 0x574c1a00, 0x574c1a30, 0x574c1a60 } -0x574c1a30->Object::~Object { 0x574c1a00, 0x574c1a30 } -0x574c1a00->Object::~Object { 0x574c1a00 } +0xffe58d8f->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4 } +0xffe58e38->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58d8f } +0xffe58d8f->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58d8f, 0xffe58e38 } +Next token is token 'p' (0xffe58e38 'p'Exception caught: cleaning lookahead and stack +0x566803f4->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58e38 } +0x566803e4->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58e38 } +0x566803d4->Object::~Object { 0x566803c4, 0x566803d4, 0xffe58e38 } +0x566803c4->Object::~Object { 0x566803c4, 0xffe58e38 } +0xffe58e38->Object::~Object { 0xffe58e38 } exception caught: printer end { } -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x574c1a00->Object::Object { } -Next token is token 'a' (0x574c1a00 'a') -Shifting token 'a' (0x574c1a00 'a') +0xffe58d8f->Object::Object { } +0xffe58e38->Object::Object { 0xffe58d8f } +0xffe58d8f->Object::~Object { 0xffe58d8f, 0xffe58e38 } +Next token is token 'a' (0xffe58e38 'a') +0xffe58da0->Object::Object { 0xffe58e38 } +0xffe58d2b->Object::Object { 0xffe58da0, 0xffe58e38 } +0xffe58d2b->Object::~Object { 0xffe58d2b, 0xffe58da0, 0xffe58e38 } +0xffe58e38->Object::~Object { 0xffe58da0, 0xffe58e38 } +Shifting token 'a' (0xffe58da0 'a') +0x566803c4->Object::Object { 0xffe58da0 } +0xffe58d2f->Object::Object { 0x566803c4, 0xffe58da0 } +0xffe58d2f->Object::~Object { 0x566803c4, 0xffe58d2f, 0xffe58da0 } +0xffe58da0->Object::~Object { 0x566803c4, 0xffe58da0 } Entering state 1 Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x574c1a00 'a') --> $$ = nterm item (0x574c1a00 'a') +0xffe58e48->Object::Object { 0x566803c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x566803c4 'a') +-> $$ = nterm item (0xffe58e48 'a') +0x566803c4->Object::~Object { 0x566803c4, 0xffe58e48 } +0x566803c4->Object::Object { 0xffe58e48 } +0xffe58d8f->Object::Object { 0x566803c4, 0xffe58e48 } +0xffe58d8f->Object::~Object { 0x566803c4, 0xffe58d8f, 0xffe58e48 } +0xffe58e48->Object::~Object { 0x566803c4, 0xffe58e48 } Entering state 10 Stack now 0 10 Reading a token -0x574c1a30->Object::Object { 0x574c1a00 } -Next token is token 'a' (0x574c1a30 'a') -Shifting token 'a' (0x574c1a30 'a') +0xffe58d8f->Object::Object { 0x566803c4 } +0xffe58e38->Object::Object { 0x566803c4, 0xffe58d8f } +0xffe58d8f->Object::~Object { 0x566803c4, 0xffe58d8f, 0xffe58e38 } +Next token is token 'a' (0xffe58e38 'a') +0xffe58da0->Object::Object { 0x566803c4, 0xffe58e38 } +0xffe58d2b->Object::Object { 0x566803c4, 0xffe58da0, 
0xffe58e38 } +0xffe58d2b->Object::~Object { 0x566803c4, 0xffe58d2b, 0xffe58da0, 0xffe58e38 } +0xffe58e38->Object::~Object { 0x566803c4, 0xffe58da0, 0xffe58e38 } +Shifting token 'a' (0xffe58da0 'a') +0x566803d4->Object::Object { 0x566803c4, 0xffe58da0 } +0xffe58d2f->Object::Object { 0x566803c4, 0x566803d4, 0xffe58da0 } +0xffe58d2f->Object::~Object { 0x566803c4, 0x566803d4, 0xffe58d2f, 0xffe58da0 } +0xffe58da0->Object::~Object { 0x566803c4, 0x566803d4, 0xffe58da0 } Entering state 1 Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x574c1a30 'a') --> $$ = nterm item (0x574c1a30 'a') +0xffe58e48->Object::Object { 0x566803c4, 0x566803d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x566803d4 'a') +-> $$ = nterm item (0xffe58e48 'a') +0x566803d4->Object::~Object { 0x566803c4, 0x566803d4, 0xffe58e48 } +0x566803d4->Object::Object { 0x566803c4, 0xffe58e48 } +0xffe58d8f->Object::Object { 0x566803c4, 0x566803d4, 0xffe58e48 } +0xffe58d8f->Object::~Object { 0x566803c4, 0x566803d4, 0xffe58d8f, 0xffe58e48 } +0xffe58e48->Object::~Object { 0x566803c4, 0x566803d4, 0xffe58e48 } Entering state 10 Stack now 0 10 10 Reading a token -0x574c1a60->Object::Object { 0x574c1a00, 0x574c1a30 } -Next token is token 'a' (0x574c1a60 'a') -Shifting token 'a' (0x574c1a60 'a') +0xffe58d8f->Object::Object { 0x566803c4, 0x566803d4 } +0xffe58e38->Object::Object { 0x566803c4, 0x566803d4, 0xffe58d8f } +0xffe58d8f->Object::~Object { 0x566803c4, 0x566803d4, 0xffe58d8f, 0xffe58e38 } +Next token is token 'a' (0xffe58e38 'a') +0xffe58da0->Object::Object { 0x566803c4, 0x566803d4, 0xffe58e38 } +0xffe58d2b->Object::Object { 0x566803c4, 0x566803d4, 0xffe58da0, 0xffe58e38 } +0xffe58d2b->Object::~Object { 0x566803c4, 0x566803d4, 0xffe58d2b, 0xffe58da0, 0xffe58e38 } +0xffe58e38->Object::~Object { 0x566803c4, 0x566803d4, 0xffe58da0, 0xffe58e38 } +Shifting token 'a' (0xffe58da0 'a') +0x566803e4->Object::Object { 0x566803c4, 0x566803d4, 0xffe58da0 } +0xffe58d2f->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58da0 } +0xffe58d2f->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58d2f, 0xffe58da0 } +0xffe58da0->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58da0 } Entering state 1 Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x574c1a60 'a') --> $$ = nterm item (0x574c1a60 'a') +0xffe58e48->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x566803e4 'a') +-> $$ = nterm item (0xffe58e48 'a') +0x566803e4->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58e48 } +0x566803e4->Object::Object { 0x566803c4, 0x566803d4, 0xffe58e48 } +0xffe58d8f->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58e48 } +0xffe58d8f->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58d8f, 0xffe58e48 } +0xffe58e48->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58e48 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x574c1a90->Object::Object { 0x574c1a00, 0x574c1a30, 0x574c1a60 } -Next token is token 'a' (0x574c1a90 'a') -Shifting token 'a' (0x574c1a90 'a') +0xffe58d8f->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4 } +0xffe58e38->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58d8f } +0xffe58d8f->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58d8f, 0xffe58e38 } +Next token is token 'a' (0xffe58e38 'a') +0xffe58da0->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58e38 } +0xffe58d2b->Object::Object { 0x566803c4, 0x566803d4, 
0x566803e4, 0xffe58da0, 0xffe58e38 } +0xffe58d2b->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58d2b, 0xffe58da0, 0xffe58e38 } +0xffe58e38->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58da0, 0xffe58e38 } +Shifting token 'a' (0xffe58da0 'a') +0x566803f4->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58da0 } +0xffe58d2f->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58da0 } +0xffe58d2f->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58d2f, 0xffe58da0 } +0xffe58da0->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58da0 } Entering state 1 Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x574c1a90 'a') --> $$ = nterm item (0x574c1a90 'a') +0xffe58e48->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x566803f4 'a') +-> $$ = nterm item (0xffe58e48 'a') +0x566803f4->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58e48 } +0x566803f4->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58e48 } +0xffe58d8f->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58e48 } +0xffe58d8f->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58d8f, 0xffe58e48 } +0xffe58e48->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58e48 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x574c1ac0->Object::Object { 0x574c1a00, 0x574c1a30, 0x574c1a60, 0x574c1a90 } -Next token is token 'p' (0x574c1ac0 'p'Exception caught: cleaning lookahead and stack -0x574c1ac0->Object::~Object { 0x574c1a00, 0x574c1a30, 0x574c1a60, 0x574c1a90, 0x574c1ac0 } -0x574c1a90->Object::~Object { 0x574c1a00, 0x574c1a30, 0x574c1a60, 0x574c1a90 } -0x574c1a60->Object::~Object { 0x574c1a00, 0x574c1a30, 0x574c1a60 } -0x574c1a30->Object::~Object { 0x574c1a00, 0x574c1a30 } -0x574c1a00->Object::~Object { 0x574c1a00 } +0xffe58d8f->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4 } +0xffe58e38->Object::Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58d8f } +0xffe58d8f->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58d8f, 0xffe58e38 } +Next token is token 'p' (0xffe58e38 'p'Exception caught: cleaning lookahead and stack +0x566803f4->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0x566803f4, 0xffe58e38 } +0x566803e4->Object::~Object { 0x566803c4, 0x566803d4, 0x566803e4, 0xffe58e38 } +0x566803d4->Object::~Object { 0x566803c4, 0x566803d4, 0xffe58e38 } +0x566803c4->Object::~Object { 0x566803c4, 0xffe58e38 } +0xffe58e38->Object::~Object { 0xffe58e38 } exception caught: printer end { } -./c++.at:1361: grep '^exception caught: printer$' stderr +./c++.at:1363: grep '^exception caught: printer$' stderr stdout: exception caught: printer -./c++.at:1361: $PREPARSER ./input aaaae +./c++.at:1363: $PREPARSER ./input aaaae stderr: exception caught: syntax error -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaE -./java.at:441: grep '[mb]4_' YYParser.y +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaE stderr: exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:1361: $PREPARSER ./input aaaaT -stderr: -./c++.at:1361: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr -697. java.at:381: ./c++.at:1361: $PREPARSER ./input aaaaR - skipped (java.at:441) +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' - -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./javapush.at:182: grep -c '^.*public boolean parse().*$' Main.java +stdout: stderr: +./c++.at:1363: $PREPARSER ./input aaaaT +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stdout: ./c++.at:1362: $PREPARSER ./input aaaas stderr: +./javapush.at:187: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: exception caught: reduction +./javapush.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=both -o Main.java input.y +./c++.at:1363: $PREPARSER ./input aaaaR ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -698. java.at:470: testing Java constructor init and init_throws ... -./java.at:475: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaal -./java.at:475: grep '[mb]4_' YYParser.y +======== Testing with C++ standard flags: '' +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: exception caught: yylex ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -698. java.at:470: ./c++.at:1362: $PREPARSER ./input i - skipped (java.at:475) - -stderr: -stdout: -./c++.at:1555: $PREPARSER ./test +./c++.at:1362: $PREPARSER ./input i stderr: exception caught: initial-action ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaap stderr: -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -699. java.at:497: testing Java value, position, and location types ... -./java.at:499: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -./java.at:499: grep '[mb]4_' YYParser.y -stdout: -stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' +702. javapush.at:217: testing Trivial Push Parser with %initial-action ... +./javapush.at:227: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y ./c++.at:1362: $PREPARSER ./input --debug aaaap -699. 
java.at:497: skipped (java.at:499) - -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./javapush.at:192: grep -c '^.*public boolean parse().*$' Main.java stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xfffabfef->Object::Object { } -0xfffac098->Object::Object { 0xfffabfef } -0xfffabfef->Object::~Object { 0xfffabfef, 0xfffac098 } -Next token is token 'a' (0xfffac098 'a') -0xfffabfe8->Object::Object { 0xfffac098 } -0xfffac098->Object::~Object { 0xfffabfe8, 0xfffac098 } -Shifting token 'a' (0xfffabfe8 'a') -0x573ba3c4->Object::Object { 0xfffabfe8 } -0xfffabfe8->Object::~Object { 0x573ba3c4, 0xfffabfe8 } +0xff9beedf->Object::Object { } +0xff9bef88->Object::Object { 0xff9beedf } +0xff9beedf->Object::~Object { 0xff9beedf, 0xff9bef88 } +Next token is token 'a' (0xff9bef88 'a') +0xff9beef0->Object::Object { 0xff9bef88 } +0xff9bee7b->Object::Object { 0xff9beef0, 0xff9bef88 } +0xff9bee7b->Object::~Object { 0xff9bee7b, 0xff9beef0, 0xff9bef88 } +0xff9bef88->Object::~Object { 0xff9beef0, 0xff9bef88 } +Shifting token 'a' (0xff9beef0 'a') +0x584c23c4->Object::Object { 0xff9beef0 } +0xff9bee7f->Object::Object { 0x584c23c4, 0xff9beef0 } +0xff9bee7f->Object::~Object { 0x584c23c4, 0xff9bee7f, 0xff9beef0 } +0xff9beef0->Object::~Object { 0x584c23c4, 0xff9beef0 } Entering state 2 Stack now 0 2 -0xfffac0a8->Object::Object { 0x573ba3c4 } +0xff9bef98->Object::Object { 0x584c23c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x573ba3c4 'a') --> $$ = nterm item (0xfffac0a8 'a') -0x573ba3c4->Object::~Object { 0x573ba3c4, 0xfffac0a8 } -0x573ba3c4->Object::Object { 0xfffac0a8 } -0xfffac0a8->Object::~Object { 0x573ba3c4, 0xfffac0a8 } + $1 = token 'a' (0x584c23c4 'a') +-> $$ = nterm item (0xff9bef98 'a') +0x584c23c4->Object::~Object { 0x584c23c4, 0xff9bef98 } +0x584c23c4->Object::Object { 0xff9bef98 } +0xff9beedf->Object::Object { 0x584c23c4, 0xff9bef98 } +0xff9beedf->Object::~Object { 0x584c23c4, 0xff9beedf, 0xff9bef98 } +0xff9bef98->Object::~Object { 0x584c23c4, 0xff9bef98 } Entering state 11 Stack now 0 11 Reading a token -0xfffabfef->Object::Object { 0x573ba3c4 } -0xfffac098->Object::Object { 0x573ba3c4, 0xfffabfef } -0xfffabfef->Object::~Object { 0x573ba3c4, 0xfffabfef, 0xfffac098 } -Next token is token 'a' (0xfffac098 'a') -0xfffabfe8->Object::Object { 0x573ba3c4, 0xfffac098 } -0xfffac098->Object::~Object { 0x573ba3c4, 0xfffabfe8, 0xfffac098 } -Shifting token 'a' (0xfffabfe8 'a') -0x573ba3d4->Object::Object { 0x573ba3c4, 0xfffabfe8 } -0xfffabfe8->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0xfffabfe8 } +0xff9beedf->Object::Object { 0x584c23c4 } +0xff9bef88->Object::Object { 0x584c23c4, 0xff9beedf } +0xff9beedf->Object::~Object { 0x584c23c4, 0xff9beedf, 0xff9bef88 } +Next token is token 'a' (0xff9bef88 'a') +0xff9beef0->Object::Object { 0x584c23c4, 0xff9bef88 } +0xff9bee7b->Object::Object { 0x584c23c4, 0xff9beef0, 0xff9bef88 } +0xff9bee7b->Object::~Object { 0x584c23c4, 0xff9bee7b, 0xff9beef0, 0xff9bef88 } +0xff9bef88->Object::~Object { 0x584c23c4, 0xff9beef0, 0xff9bef88 } +Shifting token 'a' (0xff9beef0 'a') +0x584c23d4->Object::Object { 0x584c23c4, 0xff9beef0 } +0xff9bee7f->Object::Object { 0x584c23c4, 0x584c23d4, 0xff9beef0 } +0xff9bee7f->Object::~Object { 0x584c23c4, 0x584c23d4, 0xff9bee7f, 0xff9beef0 } +0xff9beef0->Object::~Object { 0x584c23c4, 0x584c23d4, 0xff9beef0 } Entering state 2 Stack now 0 11 2 -0xfffac0a8->Object::Object { 0x573ba3c4, 0x573ba3d4 } +0xff9bef98->Object::Object { 0x584c23c4, 0x584c23d4 } Reducing stack by rule 4 (line 142): - $1 = 
token 'a' (0x573ba3d4 'a') --> $$ = nterm item (0xfffac0a8 'a') -0x573ba3d4->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0xfffac0a8 } -0x573ba3d4->Object::Object { 0x573ba3c4, 0xfffac0a8 } -0xfffac0a8->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0xfffac0a8 } + $1 = token 'a' (0x584c23d4 'a') +-> $$ = nterm item (0xff9bef98 'a') +0x584c23d4->Object::~Object { 0x584c23c4, 0x584c23d4, 0xff9bef98 } +0x584c23d4->Object::Object { 0x584c23c4, 0xff9bef98 } +0xff9beedf->Object::Object { 0x584c23c4, 0x584c23d4, 0xff9bef98 } +0xff9beedf->Object::~Object { 0x584c23c4, 0x584c23d4, 0xff9beedf, 0xff9bef98 } +0xff9bef98->Object::~Object { 0x584c23c4, 0x584c23d4, 0xff9bef98 } Entering state 11 Stack now 0 11 11 Reading a token -0xfffabfef->Object::Object { 0x573ba3c4, 0x573ba3d4 } -0xfffac098->Object::Object { 0x573ba3c4, 0x573ba3d4, 0xfffabfef } -0xfffabfef->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0xfffabfef, 0xfffac098 } -Next token is token 'a' (0xfffac098 'a') -0xfffabfe8->Object::Object { 0x573ba3c4, 0x573ba3d4, 0xfffac098 } -0xfffac098->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0xfffabfe8, 0xfffac098 } -Shifting token 'a' (0xfffabfe8 'a') -0x573ba3e4->Object::Object { 0x573ba3c4, 0x573ba3d4, 0xfffabfe8 } -0xfffabfe8->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffabfe8 } +0xff9beedf->Object::Object { 0x584c23c4, 0x584c23d4 } +0xff9bef88->Object::Object { 0x584c23c4, 0x584c23d4, 0xff9beedf } +0xff9beedf->Object::~Object { 0x584c23c4, 0x584c23d4, 0xff9beedf, 0xff9bef88 } +Next token is token 'a' (0xff9bef88 'a') +0xff9beef0->Object::Object { 0x584c23c4, 0x584c23d4, 0xff9bef88 } +0xff9bee7b->Object::Object { 0x584c23c4, 0x584c23d4, 0xff9beef0, 0xff9bef88 } +0xff9bee7b->Object::~Object { 0x584c23c4, 0x584c23d4, 0xff9bee7b, 0xff9beef0, 0xff9bef88 } +0xff9bef88->Object::~Object { 0x584c23c4, 0x584c23d4, 0xff9beef0, 0xff9bef88 } +Shifting token 'a' (0xff9beef0 'a') +0x584c23e4->Object::Object { 0x584c23c4, 0x584c23d4, 0xff9beef0 } +0xff9bee7f->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9beef0 } +0xff9bee7f->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9bee7f, 0xff9beef0 } +0xff9beef0->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9beef0 } Entering state 2 Stack now 0 11 11 2 -0xfffac0a8->Object::Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4 } +0xff9bef98->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x573ba3e4 'a') --> $$ = nterm item (0xfffac0a8 'a') -0x573ba3e4->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffac0a8 } -0x573ba3e4->Object::Object { 0x573ba3c4, 0x573ba3d4, 0xfffac0a8 } -0xfffac0a8->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffac0a8 } + $1 = token 'a' (0x584c23e4 'a') +-> $$ = nterm item (0xff9bef98 'a') +0x584c23e4->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9bef98 } +0x584c23e4->Object::Object { 0x584c23c4, 0x584c23d4, 0xff9bef98 } +0xff9beedf->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9bef98 } +0xff9beedf->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9beedf, 0xff9bef98 } +0xff9bef98->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9bef98 } Entering state 11 Stack now 0 11 11 11 Reading a token -0xfffabfef->Object::Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4 } -0xfffac098->Object::Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffabfef } -0xfffabfef->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffabfef, 0xfffac098 } -Next token is token 'a' (0xfffac098 'a') -0xfffabfe8->Object::Object { 
0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffac098 } -0xfffac098->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffabfe8, 0xfffac098 } -Shifting token 'a' (0xfffabfe8 'a') -0x573ba3f4->Object::Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffabfe8 } -0xfffabfe8->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0x573ba3f4, 0xfffabfe8 } +0xff9beedf->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4 } +0xff9bef88->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9beedf } +0xff9beedf->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9beedf, 0xff9bef88 } +Next token is token 'a' (0xff9bef88 'a') +0xff9beef0->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9bef88 } +0xff9bee7b->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9beef0, 0xff9bef88 } +0xff9bee7b->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9bee7b, 0xff9beef0, 0xff9bef88 } +0xff9bef88->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9beef0, 0xff9bef88 } +Shifting token 'a' (0xff9beef0 'a') +0x584c23f4->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9beef0 } +0xff9bee7f->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9beef0 } +0xff9bee7f->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9bee7f, 0xff9beef0 } +0xff9beef0->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9beef0 } Entering state 2 Stack now 0 11 11 11 2 -0xfffac0a8->Object::Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0x573ba3f4 } +0xff9bef98->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x573ba3f4 'a') --> $$ = nterm item (0xfffac0a8 'a') -0x573ba3f4->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0x573ba3f4, 0xfffac0a8 } -0x573ba3f4->Object::Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffac0a8 } -0xfffac0a8->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0x573ba3f4, 0xfffac0a8 } + $1 = token 'a' (0x584c23f4 'a') +-> $$ = nterm item (0xff9bef98 'a') +0x584c23f4->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9bef98 } +0x584c23f4->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9bef98 } +0xff9beedf->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9bef98 } +0xff9beedf->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9beedf, 0xff9bef98 } +0xff9bef98->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9bef98 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0xfffabfef->Object::Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0x573ba3f4 } -0xfffac098->Object::Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0x573ba3f4, 0xfffabfef } -0xfffabfef->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0x573ba3f4, 0xfffabfef, 0xfffac098 } -Next token is token 'p' (0xfffac098 'p'Exception caught: cleaning lookahead and stack -0x573ba3f4->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0x573ba3f4, 0xfffac098 } -0x573ba3e4->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffac098 } -0x573ba3d4->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0xfffac098 } -0x573ba3c4->Object::~Object { 0x573ba3c4, 0xfffac098 } -0xfffac098->Object::~Object { 0xfffac098 } +0xff9beedf->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4 } +0xff9bef88->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9beedf } +0xff9beedf->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9beedf, 0xff9bef88 } +Next token is token 'p' 
(0xff9bef88 'p'Exception caught: cleaning lookahead and stack +0x584c23f4->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9bef88 } +0x584c23e4->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9bef88 } +0x584c23d4->Object::~Object { 0x584c23c4, 0x584c23d4, 0xff9bef88 } +0x584c23c4->Object::~Object { 0x584c23c4, 0xff9bef88 } +0xff9bef88->Object::~Object { 0xff9bef88 } exception caught: printer end { } -700. java.at:528: testing Java syntax error handling without error token ... -./java.at:579: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret YYParser.y +./javapush.at:195: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xfffabfef->Object::Object { } -0xfffac098->Object::Object { 0xfffabfef } -0xfffabfef->Object::~Object { 0xfffabfef, 0xfffac098 } -Next token is token 'a' (0xfffac098 'a') -0xfffabfe8->Object::Object { 0xfffac098 } -0xfffac098->Object::~Object { 0xfffabfe8, 0xfffac098 } -Shifting token 'a' (0xfffabfe8 'a') -0x573ba3c4->Object::Object { 0xfffabfe8 } -0xfffabfe8->Object::~Object { 0x573ba3c4, 0xfffabfe8 } +0xff9beedf->Object::Object { } +0xff9bef88->Object::Object { 0xff9beedf } +0xff9beedf->Object::~Object { 0xff9beedf, 0xff9bef88 } +Next token is token 'a' (0xff9bef88 'a') +0xff9beef0->Object::Object { 0xff9bef88 } +0xff9bee7b->Object::Object { 0xff9beef0, 0xff9bef88 } +0xff9bee7b->Object::~Object { 0xff9bee7b, 0xff9beef0, 0xff9bef88 } +0xff9bef88->Object::~Object { 0xff9beef0, 0xff9bef88 } +Shifting token 'a' (0xff9beef0 'a') +0x584c23c4->Object::Object { 0xff9beef0 } +0xff9bee7f->Object::Object { 0x584c23c4, 0xff9beef0 } +0xff9bee7f->Object::~Object { 0x584c23c4, 0xff9bee7f, 0xff9beef0 } +0xff9beef0->Object::~Object { 0x584c23c4, 0xff9beef0 } Entering state 2 Stack now 0 2 -0xfffac0a8->Object::Object { 0x573ba3c4 } +0xff9bef98->Object::Object { 0x584c23c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x573ba3c4 'a') --> $$ = nterm item (0xfffac0a8 'a') -0x573ba3c4->Object::~Object { 0x573ba3c4, 0xfffac0a8 } -0x573ba3c4->Object::Object { 0xfffac0a8 } -0xfffac0a8->Object::~Object { 0x573ba3c4, 0xfffac0a8 } + $1 = token 'a' (0x584c23c4 'a') +-> $$ = nterm item (0xff9bef98 'a') +0x584c23c4->Object::~Object { 0x584c23c4, 0xff9bef98 } +0x584c23c4->Object::Object { 0xff9bef98 } +0xff9beedf->Object::Object { 0x584c23c4, 0xff9bef98 } +0xff9beedf->Object::~Object { 0x584c23c4, 0xff9beedf, 0xff9bef98 } +0xff9bef98->Object::~Object { 0x584c23c4, 0xff9bef98 } Entering state 11 Stack now 0 11 Reading a token -0xfffabfef->Object::Object { 0x573ba3c4 } -0xfffac098->Object::Object { 0x573ba3c4, 0xfffabfef } -0xfffabfef->Object::~Object { 0x573ba3c4, 0xfffabfef, 0xfffac098 } -Next token is token 'a' (0xfffac098 'a') -0xfffabfe8->Object::Object { 0x573ba3c4, 0xfffac098 } -0xfffac098->Object::~Object { 0x573ba3c4, 0xfffabfe8, 0xfffac098 } -Shifting token 'a' (0xfffabfe8 'a') -0x573ba3d4->Object::Object { 0x573ba3c4, 0xfffabfe8 } -0xfffabfe8->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0xfffabfe8 } +0xff9beedf->Object::Object { 0x584c23c4 } +0xff9bef88->Object::Object { 0x584c23c4, 0xff9beedf } +0xff9beedf->Object::~Object { 0x584c23c4, 0xff9beedf, 0xff9bef88 } +Next token is token 'a' (0xff9bef88 'a') +0xff9beef0->Object::Object { 0x584c23c4, 0xff9bef88 } +0xff9bee7b->Object::Object { 0x584c23c4, 0xff9beef0, 
0xff9bef88 } +0xff9bee7b->Object::~Object { 0x584c23c4, 0xff9bee7b, 0xff9beef0, 0xff9bef88 } +0xff9bef88->Object::~Object { 0x584c23c4, 0xff9beef0, 0xff9bef88 } +Shifting token 'a' (0xff9beef0 'a') +0x584c23d4->Object::Object { 0x584c23c4, 0xff9beef0 } +0xff9bee7f->Object::Object { 0x584c23c4, 0x584c23d4, 0xff9beef0 } +0xff9bee7f->Object::~Object { 0x584c23c4, 0x584c23d4, 0xff9bee7f, 0xff9beef0 } +0xff9beef0->Object::~Object { 0x584c23c4, 0x584c23d4, 0xff9beef0 } Entering state 2 Stack now 0 11 2 -0xfffac0a8->Object::Object { 0x573ba3c4, 0x573ba3d4 } +0xff9bef98->Object::Object { 0x584c23c4, 0x584c23d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x573ba3d4 'a') --> $$ = nterm item (0xfffac0a8 'a') -0x573ba3d4->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0xfffac0a8 } -0x573ba3d4->Object::Object { 0x573ba3c4, 0xfffac0a8 } -0xfffac0a8->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0xfffac0a8 } + $1 = token 'a' (0x584c23d4 'a') +-> $$ = nterm item (0xff9bef98 'a') +0x584c23d4->Object::~Object { 0x584c23c4, 0x584c23d4, 0xff9bef98 } +0x584c23d4->Object::Object { 0x584c23c4, 0xff9bef98 } +0xff9beedf->Object::Object { 0x584c23c4, 0x584c23d4, 0xff9bef98 } +0xff9beedf->Object::~Object { 0x584c23c4, 0x584c23d4, 0xff9beedf, 0xff9bef98 } +0xff9bef98->Object::~Object { 0x584c23c4, 0x584c23d4, 0xff9bef98 } Entering state 11 Stack now 0 11 11 Reading a token -0xfffabfef->Object::Object { 0x573ba3c4, 0x573ba3d4 } -0xfffac098->Object::Object { 0x573ba3c4, 0x573ba3d4, 0xfffabfef } -0xfffabfef->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0xfffabfef, 0xfffac098 } -Next token is token 'a' (0xfffac098 'a') -0xfffabfe8->Object::Object { 0x573ba3c4, 0x573ba3d4, 0xfffac098 } -0xfffac098->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0xfffabfe8, 0xfffac098 } -Shifting token 'a' (0xfffabfe8 'a') -0x573ba3e4->Object::Object { 0x573ba3c4, 0x573ba3d4, 0xfffabfe8 } -0xfffabfe8->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffabfe8 } +0xff9beedf->Object::Object { 0x584c23c4, 0x584c23d4 } +0xff9bef88->Object::Object { 0x584c23c4, 0x584c23d4, 0xff9beedf } +0xff9beedf->Object::~Object { 0x584c23c4, 0x584c23d4, 0xff9beedf, 0xff9bef88 } +Next token is token 'a' (0xff9bef88 'a') +0xff9beef0->Object::Object { 0x584c23c4, 0x584c23d4, 0xff9bef88 } +0xff9bee7b->Object::Object { 0x584c23c4, 0x584c23d4, 0xff9beef0, 0xff9bef88 } +0xff9bee7b->Object::~Object { 0x584c23c4, 0x584c23d4, 0xff9bee7b, 0xff9beef0, 0xff9bef88 } +0xff9bef88->Object::~Object { 0x584c23c4, 0x584c23d4, 0xff9beef0, 0xff9bef88 } +Shifting token 'a' (0xff9beef0 'a') +0x584c23e4->Object::Object { 0x584c23c4, 0x584c23d4, 0xff9beef0 } +0xff9bee7f->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9beef0 } +0xff9bee7f->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9bee7f, 0xff9beef0 } +0xff9beef0->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9beef0 } Entering state 2 Stack now 0 11 11 2 -0xfffac0a8->Object::Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4 } +0xff9bef98->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x573ba3e4 'a') --> $$ = nterm item (0xfffac0a8 'a') -0x573ba3e4->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffac0a8 } -0x573ba3e4->Object::Object { 0x573ba3c4, 0x573ba3d4, 0xfffac0a8 } -0xfffac0a8->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffac0a8 } + $1 = token 'a' (0x584c23e4 'a') +-> $$ = nterm item (0xff9bef98 'a') +0x584c23e4->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9bef98 } +0x584c23e4->Object::Object { 
0x584c23c4, 0x584c23d4, 0xff9bef98 } +0xff9beedf->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9bef98 } +0xff9beedf->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9beedf, 0xff9bef98 } +0xff9bef98->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9bef98 } Entering state 11 Stack now 0 11 11 11 Reading a token -0xfffabfef->Object::Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4 } -0xfffac098->Object::Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffabfef } -0xfffabfef->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffabfef, 0xfffac098 } -Next token is token 'a' (0xfffac098 'a') -0xfffabfe8->Object::Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffac098 } -0xfffac098->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffabfe8, 0xfffac098 } -Shifting token 'a' (0xfffabfe8 'a') -0x573ba3f4->Object::Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffabfe8 } -0xfffabfe8->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0x573ba3f4, 0xfffabfe8 } +0xff9beedf->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4 } +0xff9bef88->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9beedf } +0xff9beedf->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9beedf, 0xff9bef88 } +Next token is token 'a' (0xff9bef88 'a') +0xff9beef0->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9bef88 } +0xff9bee7b->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9beef0, 0xff9bef88 } +0xff9bee7b->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9bee7b, 0xff9beef0, 0xff9bef88 } +0xff9bef88->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9beef0, 0xff9bef88 } +Shifting token 'a' (0xff9beef0 'a') +0x584c23f4->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9beef0 } +0xff9bee7f->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9beef0 } +0xff9bee7f->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9bee7f, 0xff9beef0 } +0xff9beef0->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9beef0 } Entering state 2 Stack now 0 11 11 11 2 -0xfffac0a8->Object::Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0x573ba3f4 } +0xff9bef98->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x573ba3f4 'a') --> $$ = nterm item (0xfffac0a8 'a') -0x573ba3f4->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0x573ba3f4, 0xfffac0a8 } -0x573ba3f4->Object::Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffac0a8 } -0xfffac0a8->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0x573ba3f4, 0xfffac0a8 } + $1 = token 'a' (0x584c23f4 'a') +-> $$ = nterm item (0xff9bef98 'a') +0x584c23f4->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9bef98 } +0x584c23f4->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9bef98 } +0xff9beedf->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9bef98 } +0xff9beedf->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9beedf, 0xff9bef98 } +0xff9bef98->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9bef98 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0xfffabfef->Object::Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0x573ba3f4 } -0xfffac098->Object::Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0x573ba3f4, 0xfffabfef } -0xfffabfef->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0x573ba3f4, 0xfffabfef, 0xfffac098 } -Next token is token 'p' (0xfffac098 'p'Exception caught: cleaning 
lookahead and stack -0x573ba3f4->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0x573ba3f4, 0xfffac098 } -0x573ba3e4->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0x573ba3e4, 0xfffac098 } -0x573ba3d4->Object::~Object { 0x573ba3c4, 0x573ba3d4, 0xfffac098 } -0x573ba3c4->Object::~Object { 0x573ba3c4, 0xfffac098 } -0xfffac098->Object::~Object { 0xfffac098 } +0xff9beedf->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4 } +0xff9bef88->Object::Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9beedf } +0xff9beedf->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9beedf, 0xff9bef88 } +Next token is token 'p' (0xff9bef88 'p'Exception caught: cleaning lookahead and stack +0x584c23f4->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0x584c23f4, 0xff9bef88 } +0x584c23e4->Object::~Object { 0x584c23c4, 0x584c23d4, 0x584c23e4, 0xff9bef88 } +0x584c23d4->Object::~Object { 0x584c23c4, 0x584c23d4, 0xff9bef88 } +0x584c23c4->Object::~Object { 0x584c23c4, 0xff9bef88 } +0xff9bef88->Object::~Object { 0xff9bef88 } exception caught: printer end { } +./javapush.at:199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y ./c++.at:1362: grep '^exception caught: printer$' stderr -700. java.at:528: stdout: +stdout: exception caught: printer ./c++.at:1362: $PREPARSER ./input aaaae - skipped (java.at:580) - +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: exception caught: syntax error -701. javapush.at:172: testing Trivial Push Parser with api.push-pull verification ... -./javapush.at:181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=pull -o Main.java input.y -stderr: -stdout: -./c++.at:858: $PREPARSER ./input -./javapush.at:182: grep -c '^.*public boolean parse().*$' Main.java -stderr: -stderr: -stdout: -./c++.at:1555: ./check -./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./javapush.at:228: grep -c '^System.err.println("Initial action invoked");$' Main.java ./c++.at:1362: $PREPARSER ./input aaaaE -./javapush.at:187: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java stderr: exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./javapush.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=both -o Main.java input.y +702. javapush.at:217: skipped (javapush.at:230) +./javapush.at:200: grep -c '^.*public boolean parse().*$' Main.java ./c++.at:1362: $PREPARSER ./input aaaaT stderr: -stderr: + ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -689. 
c++.at:1371: ok -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./javapush.at:203: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java ./c++.at:1362: $PREPARSER ./input aaaaR -======== Testing with C++ standard flags: '' - -./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -./javapush.at:192: grep -c '^.*public boolean parse().*$' Main.java ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./javapush.at:195: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java -702. javapush.at:217: testing Trivial Push Parser with %initial-action ... -./javapush.at:227: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y -./javapush.at:228: grep -c '^System.err.println("Initial action invoked");$' Main.java +701. javapush.at:172: ======== Testing with C++ standard flags: '' + skipped (javapush.at:207) ./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./javapush.at:199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y -702. javapush.at:217: skipped (javapush.at:230) -./javapush.at:200: grep -c '^.*public boolean parse().*$' Main.java -./javapush.at:203: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java +stderr: +stdout: +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:1066: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 703. d.at:103: testing D parser class extends and implements ... ./d.at:106: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./c++.at:1066: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +704. d.at:138: testing D parser class api.token.raw true by default ... +./d.at:141: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./c++.at:1066: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./d.at:106: grep '[mb]4_' YYParser.y stdout: +======== Testing with C++ standard flags: '' +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS 703. d.at:103: skipped (d.at:106) -701. javapush.at:172: - skipped (javapush.at:207) -704. d.at:138: testing D parser class api.token.raw true by default ... -./d.at:141: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -705. cxx-type.at:409: testing GLR: Resolve ambiguity, impure, no locations ... ./d.at:141: grep '[mb]4_' YYParser.y -./cxx-type.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y stdout: 704. d.at:138: skipped (d.at:141) + +stderr: +stdout: +./c++.at:1066: ./check +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +705. 
cxx-type.at:409: testing GLR: Resolve ambiguity, impure, no locations ... +./cxx-type.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +706. cxx-type.at:415: testing GLR: Resolve ambiguity, impure, locations ... +./cxx-type.at:416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +stderr: +stdout: +./c++.at:1555: $PREPARSER ./test +stderr: +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: types.y:77.8-37: warning: unset value: $$ [-Wother] types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples - ./cxx-type.at:410: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -706. cxx-type.at:415: testing GLR: Resolve ambiguity, impure, locations ... -./cxx-type.at:416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y stderr: types.y:87.8-37: warning: unset value: $$ [-Wother] types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] @@ -257798,249 +255783,192 @@ ./cxx-type.at:416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS stderr: stdout: -./c++.at:1363: $PREPARSER ./input aaaas +./c++.at:1555: ./check +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaal +stdout: +./c++.at:851: $PREPARSER ./input stderr: -exception caught: yylex -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input i +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -exception caught: initial-action -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaap +stdout: +./c++.at:572: $here/modern +stdout: +Modern C++: 202002 +./c++.at:572: $PREPARSER ./list stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input --debug aaaap +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xffe53e1f->Object::Object { } -0xffe53ec8->Object::Object { 0xffe53e1f } -0xffe53e1f->Object::~Object { 0xffe53e1f, 0xffe53ec8 } -Next token is token 'a' (0xffe53ec8 'a') -0xffe53e18->Object::Object { 0xffe53ec8 } -0xffe53ec8->Object::~Object { 
0xffe53e18, 0xffe53ec8 } -Shifting token 'a' (0xffe53e18 'a') -0x56e613c4->Object::Object { 0xffe53e18 } -0xffe53e18->Object::~Object { 0x56e613c4, 0xffe53e18 } -Entering state 1 -Stack now 0 1 -0xffe53ed8->Object::Object { 0x56e613c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e613c4 'a') --> $$ = nterm item (0xffe53ed8 'a') -0x56e613c4->Object::~Object { 0x56e613c4, 0xffe53ed8 } -0x56e613c4->Object::Object { 0xffe53ed8 } -0xffe53ed8->Object::~Object { 0x56e613c4, 0xffe53ed8 } -Entering state 10 -Stack now 0 10 -Reading a token -0xffe53e1f->Object::Object { 0x56e613c4 } -0xffe53ec8->Object::Object { 0x56e613c4, 0xffe53e1f } -0xffe53e1f->Object::~Object { 0x56e613c4, 0xffe53e1f, 0xffe53ec8 } -Next token is token 'a' (0xffe53ec8 'a') -0xffe53e18->Object::Object { 0x56e613c4, 0xffe53ec8 } -0xffe53ec8->Object::~Object { 0x56e613c4, 0xffe53e18, 0xffe53ec8 } -Shifting token 'a' (0xffe53e18 'a') -0x56e613d4->Object::Object { 0x56e613c4, 0xffe53e18 } -0xffe53e18->Object::~Object { 0x56e613c4, 0x56e613d4, 0xffe53e18 } -Entering state 1 -Stack now 0 10 1 -0xffe53ed8->Object::Object { 0x56e613c4, 0x56e613d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e613d4 'a') --> $$ = nterm item (0xffe53ed8 'a') -0x56e613d4->Object::~Object { 0x56e613c4, 0x56e613d4, 0xffe53ed8 } -0x56e613d4->Object::Object { 0x56e613c4, 0xffe53ed8 } -0xffe53ed8->Object::~Object { 0x56e613c4, 0x56e613d4, 0xffe53ed8 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0xffe53e1f->Object::Object { 0x56e613c4, 0x56e613d4 } -0xffe53ec8->Object::Object { 0x56e613c4, 0x56e613d4, 0xffe53e1f } -0xffe53e1f->Object::~Object { 0x56e613c4, 0x56e613d4, 0xffe53e1f, 0xffe53ec8 } -Next token is token 'a' (0xffe53ec8 'a') -0xffe53e18->Object::Object { 0x56e613c4, 0x56e613d4, 0xffe53ec8 } -0xffe53ec8->Object::~Object { 0x56e613c4, 0x56e613d4, 0xffe53e18, 0xffe53ec8 } -Shifting token 'a' (0xffe53e18 'a') -0x56e613e4->Object::Object { 0x56e613c4, 0x56e613d4, 0xffe53e18 } -0xffe53e18->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53e18 } -Entering state 1 -Stack now 0 10 10 1 -0xffe53ed8->Object::Object { 0x56e613c4, 0x56e613d4, 0x56e613e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e613e4 'a') --> $$ = nterm item (0xffe53ed8 'a') -0x56e613e4->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53ed8 } -0x56e613e4->Object::Object { 0x56e613c4, 0x56e613d4, 0xffe53ed8 } -0xffe53ed8->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53ed8 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0xffe53e1f->Object::Object { 0x56e613c4, 0x56e613d4, 0x56e613e4 } -0xffe53ec8->Object::Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53e1f } -0xffe53e1f->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53e1f, 0xffe53ec8 } -Next token is token 'a' (0xffe53ec8 'a') -0xffe53e18->Object::Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53ec8 } -0xffe53ec8->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53e18, 0xffe53ec8 } -Shifting token 'a' (0xffe53e18 'a') -0x56e613f4->Object::Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53e18 } -0xffe53e18->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0x56e613f4, 0xffe53e18 } -Entering state 1 -Stack now 0 10 10 10 1 -0xffe53ed8->Object::Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0x56e613f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e613f4 'a') --> $$ = nterm item (0xffe53ed8 'a') -0x56e613f4->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0x56e613f4, 0xffe53ed8 } 
-0x56e613f4->Object::Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53ed8 } -0xffe53ed8->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0x56e613f4, 0xffe53ed8 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0xffe53e1f->Object::Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0x56e613f4 } -0xffe53ec8->Object::Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0x56e613f4, 0xffe53e1f } -0xffe53e1f->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0x56e613f4, 0xffe53e1f, 0xffe53ec8 } -Next token is token 'p' (0xffe53ec8 'p'Exception caught: cleaning lookahead and stack -0x56e613f4->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0x56e613f4, 0xffe53ec8 } -0x56e613e4->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53ec8 } -0x56e613d4->Object::~Object { 0x56e613c4, 0x56e613d4, 0xffe53ec8 } -0x56e613c4->Object::~Object { 0x56e613c4, 0xffe53ec8 } -0xffe53ec8->Object::~Object { 0xffe53ec8 } -exception caught: printer -end { } -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:1555: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xffe53e1f->Object::Object { } -0xffe53ec8->Object::Object { 0xffe53e1f } -0xffe53e1f->Object::~Object { 0xffe53e1f, 0xffe53ec8 } -Next token is token 'a' (0xffe53ec8 'a') -0xffe53e18->Object::Object { 0xffe53ec8 } -0xffe53ec8->Object::~Object { 0xffe53e18, 0xffe53ec8 } -Shifting token 'a' (0xffe53e18 'a') -0x56e613c4->Object::Object { 0xffe53e18 } -0xffe53e18->Object::~Object { 0x56e613c4, 0xffe53e18 } -Entering state 1 -Stack now 0 1 -0xffe53ed8->Object::Object { 0x56e613c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e613c4 'a') --> $$ = nterm item (0xffe53ed8 'a') -0x56e613c4->Object::~Object { 0x56e613c4, 0xffe53ed8 } -0x56e613c4->Object::Object { 0xffe53ed8 } -0xffe53ed8->Object::~Object { 0x56e613c4, 0xffe53ed8 } -Entering state 10 -Stack now 0 10 -Reading a token -0xffe53e1f->Object::Object { 0x56e613c4 } -0xffe53ec8->Object::Object { 0x56e613c4, 0xffe53e1f } -0xffe53e1f->Object::~Object { 0x56e613c4, 0xffe53e1f, 0xffe53ec8 } -Next token is token 'a' (0xffe53ec8 'a') -0xffe53e18->Object::Object { 0x56e613c4, 0xffe53ec8 } -0xffe53ec8->Object::~Object { 0x56e613c4, 0xffe53e18, 0xffe53ec8 } -Shifting token 'a' (0xffe53e18 'a') -0x56e613d4->Object::Object { 0x56e613c4, 0xffe53e18 } -0xffe53e18->Object::~Object { 0x56e613c4, 0x56e613d4, 0xffe53e18 } -Entering state 1 -Stack now 0 10 1 -0xffe53ed8->Object::Object { 0x56e613c4, 0x56e613d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e613d4 'a') --> $$ = nterm item (0xffe53ed8 'a') -0x56e613d4->Object::~Object { 0x56e613c4, 0x56e613d4, 0xffe53ed8 } -0x56e613d4->Object::Object { 0x56e613c4, 0xffe53ed8 } -0xffe53ed8->Object::~Object { 0x56e613c4, 0x56e613d4, 0xffe53ed8 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0xffe53e1f->Object::Object { 0x56e613c4, 0x56e613d4 } -0xffe53ec8->Object::Object { 0x56e613c4, 0x56e613d4, 0xffe53e1f } -0xffe53e1f->Object::~Object { 0x56e613c4, 0x56e613d4, 0xffe53e1f, 0xffe53ec8 } -Next token is token 'a' (0xffe53ec8 'a') -0xffe53e18->Object::Object { 0x56e613c4, 0x56e613d4, 0xffe53ec8 } -0xffe53ec8->Object::~Object { 0x56e613c4, 0x56e613d4, 0xffe53e18, 0xffe53ec8 } -Shifting token 'a' (0xffe53e18 'a') -0x56e613e4->Object::Object { 0x56e613c4, 0x56e613d4, 0xffe53e18 } 
-0xffe53e18->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53e18 } -Entering state 1 -Stack now 0 10 10 1 -0xffe53ed8->Object::Object { 0x56e613c4, 0x56e613d4, 0x56e613e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e613e4 'a') --> $$ = nterm item (0xffe53ed8 'a') -0x56e613e4->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53ed8 } -0x56e613e4->Object::Object { 0x56e613c4, 0x56e613d4, 0xffe53ed8 } -0xffe53ed8->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53ed8 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0xffe53e1f->Object::Object { 0x56e613c4, 0x56e613d4, 0x56e613e4 } -0xffe53ec8->Object::Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53e1f } -0xffe53e1f->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53e1f, 0xffe53ec8 } -Next token is token 'a' (0xffe53ec8 'a') -0xffe53e18->Object::Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53ec8 } -0xffe53ec8->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53e18, 0xffe53ec8 } -Shifting token 'a' (0xffe53e18 'a') -0x56e613f4->Object::Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53e18 } -0xffe53e18->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0x56e613f4, 0xffe53e18 } -Entering state 1 -Stack now 0 10 10 10 1 -0xffe53ed8->Object::Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0x56e613f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56e613f4 'a') --> $$ = nterm item (0xffe53ed8 'a') -0x56e613f4->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0x56e613f4, 0xffe53ed8 } -0x56e613f4->Object::Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53ed8 } -0xffe53ed8->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0x56e613f4, 0xffe53ed8 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0xffe53e1f->Object::Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0x56e613f4 } -0xffe53ec8->Object::Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0x56e613f4, 0xffe53e1f } -0xffe53e1f->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0x56e613f4, 0xffe53e1f, 0xffe53ec8 } -Next token is token 'p' (0xffe53ec8 'p'Exception caught: cleaning lookahead and stack -0x56e613f4->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0x56e613f4, 0xffe53ec8 } -0x56e613e4->Object::~Object { 0x56e613c4, 0x56e613d4, 0x56e613e4, 0xffe53ec8 } -0x56e613d4->Object::~Object { 0x56e613c4, 0x56e613d4, 0xffe53ec8 } -0x56e613c4->Object::~Object { 0x56e613c4, 0xffe53ec8 } -0xffe53ec8->Object::~Object { 0xffe53ec8 } -exception caught: printer -end { } -./c++.at:1363: grep '^exception caught: printer$' stderr stdout: -exception caught: printer -./c++.at:1363: $PREPARSER ./input aaaae +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1555: ./check +-std=c++11 not supported +======== Testing with C++ standard flags: '' +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -exception caught: syntax error -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaE +stdout: +./c++.at:1555: ./check +./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaT +stdout: +./c++.at:857: 
$PREPARSER ./input stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaR +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:571: $here/modern +stdout: +Modern C++: 202002 +./c++.at:571: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:859: $PREPARSER ./input +./c++.at:566: $here/modern +stdout: +Modern C++: 202100 +./c++.at:566: $PREPARSER ./list stderr: -./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +666. c++.at:566: ok + +stderr: +stdout: ======== Testing with C++ standard flags: '' -./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +707. cxx-type.at:420: testing GLR: Resolve ambiguity, pure, no locations ... 
+./cxx-type.at:421: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +stderr: +stdout: +./c++.at:569: $here/modern +stdout: +Modern C++: 202002 +./c++.at:569: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +types.y:77.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./cxx-type.at:421: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +stderr: +stdout: +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:851: $PREPARSER ./input +stderr: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./cxx-type.at:412: $PREPARSER ./types test-input @@ -258615,8 +256543,13 @@ Reducing stack 0 by rule 6 (line 77): $1 = token '@' () Cleanup: popping nterm prog () +stderr: +stdout: ./cxx-type.at:412: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./cxx-type.at:417: $PREPARSER ./types test-input +stderr: stderr: +17.5: syntax error Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 64): @@ -259183,23 +257116,8 @@ Reducing stack 0 by rule 6 (line 77): $1 = token '@' () Cleanup: popping nterm prog () -705. cxx-type.at:409: ok - -stderr: -stdout: -./c++.at:1555: $PREPARSER ./test -stderr: -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./cxx-type.at:417: $PREPARSER ./types test-input -stderr: -17.5: syntax error ./cxx-type.at:417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -707. cxx-type.at:420: testing GLR: Resolve ambiguity, pure, no locations ... -./cxx-type.at:421: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +705. cxx-type.at:409: ok ./cxx-type.at:417: $PREPARSER ./types -p test-input stderr: Starting parse @@ -259768,6 +257686,7 @@ Reducing stack 0 by rule 6 (line 87): $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) + ./cxx-type.at:417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse @@ -260338,225 +258257,414 @@ Cleanup: popping nterm prog (1.1-19.5: ) 706. cxx-type.at:415: ok +708. cxx-type.at:426: testing GLR: Resolve ambiguity, pure, locations ... +./cxx-type.at:427: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +709. 
cxx-type.at:432: testing GLR: Merge conflicting parses, impure, no locations ... +./cxx-type.at:433: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y stderr: -stdout: -./c++.at:1555: ./check -./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y +types.y:87.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./cxx-type.at:427: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS stderr: types.y:77.8-37: warning: unset value: $$ [-Wother] types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./cxx-type.at:421: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -708. cxx-type.at:426: testing GLR: Resolve ambiguity, pure, locations ... -./cxx-type.at:427: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./cxx-type.at:433: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +stderr: +stdout: +./c++.at:857: $PREPARSER ./input +stderr: +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:574: $here/modern +stdout: +Modern C++: 202002 +./c++.at:574: $PREPARSER ./list +stderr: +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: +stdout: +./c++.at:1360: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaap +stderr: stderr: stdout: ./c++.at:1361: $PREPARSER ./input aaaas +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: exception caught: reduction ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaal +./c++.at:1360: $PREPARSER ./input --debug aaaap stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x570f7a00->Object::Object { } +Next token is token 'a' (0x570f7a00 'a') +Shifting token 'a' (0x570f7a00 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x570f7a00 'a') +-> $$ = nterm item (0x570f7a00 'a') +Entering state 11 +Stack now 0 11 +Reading a token 
+0x570f7a30->Object::Object { 0x570f7a00 } +Next token is token 'a' (0x570f7a30 'a') +Shifting token 'a' (0x570f7a30 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x570f7a30 'a') +-> $$ = nterm item (0x570f7a30 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x570f7a60->Object::Object { 0x570f7a00, 0x570f7a30 } +Next token is token 'a' (0x570f7a60 'a') +Shifting token 'a' (0x570f7a60 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x570f7a60 'a') +-> $$ = nterm item (0x570f7a60 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x570f7a90->Object::Object { 0x570f7a00, 0x570f7a30, 0x570f7a60 } +Next token is token 'a' (0x570f7a90 'a') +Shifting token 'a' (0x570f7a90 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x570f7a90 'a') +-> $$ = nterm item (0x570f7a90 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x570f7ac0->Object::Object { 0x570f7a00, 0x570f7a30, 0x570f7a60, 0x570f7a90 } +Next token is token 'p' (0x570f7ac0 'p'Exception caught: cleaning lookahead and stack +0x570f7ac0->Object::~Object { 0x570f7a00, 0x570f7a30, 0x570f7a60, 0x570f7a90, 0x570f7ac0 } +0x570f7a90->Object::~Object { 0x570f7a00, 0x570f7a30, 0x570f7a60, 0x570f7a90 } +0x570f7a60->Object::~Object { 0x570f7a00, 0x570f7a30, 0x570f7a60 } +0x570f7a30->Object::~Object { 0x570f7a00, 0x570f7a30 } +0x570f7a00->Object::~Object { 0x570f7a00 } +exception caught: printer +end { } +./c++.at:1361: $PREPARSER ./input aaaal +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -types.y:87.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples exception caught: yylex ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./cxx-type.at:427: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x570f7a00->Object::Object { } +Next token is token 'a' (0x570f7a00 'a') +Shifting token 'a' (0x570f7a00 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x570f7a00 'a') +-> $$ = nterm item (0x570f7a00 'a') +Entering state 11 +Stack now 0 11 +Reading a token +0x570f7a30->Object::Object { 0x570f7a00 } +Next token is token 'a' (0x570f7a30 'a') +Shifting token 'a' (0x570f7a30 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x570f7a30 'a') +-> $$ = nterm item (0x570f7a30 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x570f7a60->Object::Object { 0x570f7a00, 0x570f7a30 } +Next token is token 'a' (0x570f7a60 'a') +Shifting token 'a' (0x570f7a60 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x570f7a60 'a') +-> $$ = nterm item (0x570f7a60 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x570f7a90->Object::Object { 0x570f7a00, 0x570f7a30, 0x570f7a60 } +Next token is token 'a' (0x570f7a90 'a') +Shifting token 'a' (0x570f7a90 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x570f7a90 'a') +-> $$ = nterm item (0x570f7a90 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x570f7ac0->Object::Object { 0x570f7a00, 0x570f7a30, 0x570f7a60, 0x570f7a90 } 
+Next token is token 'p' (0x570f7ac0 'p'Exception caught: cleaning lookahead and stack +0x570f7ac0->Object::~Object { 0x570f7a00, 0x570f7a30, 0x570f7a60, 0x570f7a90, 0x570f7ac0 } +0x570f7a90->Object::~Object { 0x570f7a00, 0x570f7a30, 0x570f7a60, 0x570f7a90 } +0x570f7a60->Object::~Object { 0x570f7a00, 0x570f7a30, 0x570f7a60 } +0x570f7a30->Object::~Object { 0x570f7a00, 0x570f7a30 } +0x570f7a00->Object::~Object { 0x570f7a00 } +exception caught: printer +end { } +./c++.at:1360: grep '^exception caught: printer$' stderr +stdout: +exception caught: printer +./c++.at:1360: $PREPARSER ./input aaaae ./c++.at:1361: $PREPARSER ./input i stderr: exception caught: initial-action ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +exception caught: syntax error +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input aaaap +./c++.at:1360: $PREPARSER ./input aaaaE stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input --debug aaaap stderr: +./existing.at:808: sed -n 's/^State //p' input.output | tail -1 Starting parse Entering state 0 Stack now 0 Reading a token -0x5814ea00->Object::Object { } -Next token is token 'a' (0x5814ea00 'a') -Shifting token 'a' (0x5814ea00 'a') +0x5759ea00->Object::Object { } +Next token is token 'a' (0x5759ea00 'a') +Shifting token 'a' (0x5759ea00 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5814ea00 'a') --> $$ = nterm item (0x5814ea00 'a') + $1 = token 'a' (0x5759ea00 'a') +-> $$ = nterm item (0x5759ea00 'a') Entering state 10 Stack now 0 10 Reading a token -0x5814ea30->Object::Object { 0x5814ea00 } -Next token is token 'a' (0x5814ea30 'a') -Shifting token 'a' (0x5814ea30 'a') +0x5759ea30->Object::Object { 0x5759ea00 } +Next token is token 'a' (0x5759ea30 'a') +Shifting token 'a' (0x5759ea30 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5814ea30 'a') --> $$ = nterm item (0x5814ea30 'a') + $1 = token 'a' (0x5759ea30 'a') +-> $$ = nterm item (0x5759ea30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x5814ea60->Object::Object { 0x5814ea00, 0x5814ea30 } -Next token is token 'a' (0x5814ea60 'a') -Shifting token 'a' (0x5814ea60 'a') +0x5759ea60->Object::Object { 0x5759ea00, 0x5759ea30 } +Next token is token 'a' (0x5759ea60 'a') +Shifting token 'a' (0x5759ea60 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5814ea60 'a') --> $$ = nterm item (0x5814ea60 'a') + $1 = token 'a' (0x5759ea60 'a') +-> $$ = nterm item (0x5759ea60 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x5814ea90->Object::Object { 0x5814ea00, 0x5814ea30, 0x5814ea60 } -Next token is token 'a' (0x5814ea90 'a') -Shifting token 'a' (0x5814ea90 'a') +0x5759ea90->Object::Object { 0x5759ea00, 0x5759ea30, 0x5759ea60 } +Next token is token 'a' (0x5759ea90 'a') +Shifting token 'a' (0x5759ea90 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5814ea90 'a') --> $$ = nterm item (0x5814ea90 'a') + $1 = token 'a' (0x5759ea90 'a') +-> $$ = nterm item (0x5759ea90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a 
token -0x5814eac0->Object::Object { 0x5814ea00, 0x5814ea30, 0x5814ea60, 0x5814ea90 } -Next token is token 'p' (0x5814eac0 'p'Exception caught: cleaning lookahead and stack -0x5814eac0->Object::~Object { 0x5814ea00, 0x5814ea30, 0x5814ea60, 0x5814ea90, 0x5814eac0 } -0x5814ea90->Object::~Object { 0x5814ea00, 0x5814ea30, 0x5814ea60, 0x5814ea90 } -0x5814ea60->Object::~Object { 0x5814ea00, 0x5814ea30, 0x5814ea60 } -0x5814ea30->Object::~Object { 0x5814ea00, 0x5814ea30 } -0x5814ea00->Object::~Object { 0x5814ea00 } +0x5759eac0->Object::Object { 0x5759ea00, 0x5759ea30, 0x5759ea60, 0x5759ea90 } +Next token is token 'p' (0x5759eac0 'p'Exception caught: cleaning lookahead and stack +0x5759eac0->Object::~Object { 0x5759ea00, 0x5759ea30, 0x5759ea60, 0x5759ea90, 0x5759eac0 } +0x5759ea90->Object::~Object { 0x5759ea00, 0x5759ea30, 0x5759ea60, 0x5759ea90 } +0x5759ea60->Object::~Object { 0x5759ea00, 0x5759ea30, 0x5759ea60 } +0x5759ea30->Object::~Object { 0x5759ea00, 0x5759ea30 } +0x5759ea00->Object::~Object { 0x5759ea00 } exception caught: printer end { } -stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:1066: $PREPARSER ./input < in +./c++.at:1360: $PREPARSER ./input aaaaT stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x5814ea00->Object::Object { } -Next token is token 'a' (0x5814ea00 'a') -Shifting token 'a' (0x5814ea00 'a') +0x5759ea00->Object::Object { } +Next token is token 'a' (0x5759ea00 'a') +Shifting token 'a' (0x5759ea00 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5814ea00 'a') --> $$ = nterm item (0x5814ea00 'a') + $1 = token 'a' (0x5759ea00 'a') +-> $$ = nterm item (0x5759ea00 'a') Entering state 10 Stack now 0 10 Reading a token -0x5814ea30->Object::Object { 0x5814ea00 } -Next token is token 'a' (0x5814ea30 'a') -Shifting token 'a' (0x5814ea30 'a') +0x5759ea30->Object::Object { 0x5759ea00 } +Next token is token 'a' (0x5759ea30 'a') +Shifting token 'a' (0x5759ea30 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5814ea30 'a') --> $$ = nterm item (0x5814ea30 'a') + $1 = token 'a' (0x5759ea30 'a') +-> $$ = nterm item (0x5759ea30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x5814ea60->Object::Object { 0x5814ea00, 0x5814ea30 } -Next token is token 'a' (0x5814ea60 'a') -Shifting token 'a' (0x5814ea60 'a') +0x5759ea60->Object::Object { 0x5759ea00, 0x5759ea30 } +Next token is token 'a' (0x5759ea60 'a') +Shifting token 'a' (0x5759ea60 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5814ea60 'a') --> $$ = nterm item (0x5814ea60 'a') + $1 = token 'a' (0x5759ea60 'a') +-> $$ = nterm item (0x5759ea60 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x5814ea90->Object::Object { 0x5814ea00, 0x5814ea30, 0x5814ea60 } -Next token is token 'a' (0x5814ea90 'a') -Shifting token 'a' (0x5814ea90 'a') +0x5759ea90->Object::Object { 0x5759ea00, 0x5759ea30, 0x5759ea60 } +Next token is token 'a' (0x5759ea90 'a') +Shifting token 'a' (0x5759ea90 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5814ea90 'a') --> $$ = nterm item (0x5814ea90 'a') + $1 = token 'a' (0x5759ea90 'a') +-> $$ = nterm item (0x5759ea90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x5814eac0->Object::Object { 0x5814ea00, 0x5814ea30, 0x5814ea60, 0x5814ea90 } -Next token is token 'p' (0x5814eac0 'p'Exception caught: cleaning lookahead 
and stack -0x5814eac0->Object::~Object { 0x5814ea00, 0x5814ea30, 0x5814ea60, 0x5814ea90, 0x5814eac0 } -0x5814ea90->Object::~Object { 0x5814ea00, 0x5814ea30, 0x5814ea60, 0x5814ea90 } -0x5814ea60->Object::~Object { 0x5814ea00, 0x5814ea30, 0x5814ea60 } -0x5814ea30->Object::~Object { 0x5814ea00, 0x5814ea30 } -0x5814ea00->Object::~Object { 0x5814ea00 } +0x5759eac0->Object::Object { 0x5759ea00, 0x5759ea30, 0x5759ea60, 0x5759ea90 } +Next token is token 'p' (0x5759eac0 'p'Exception caught: cleaning lookahead and stack +0x5759eac0->Object::~Object { 0x5759ea00, 0x5759ea30, 0x5759ea60, 0x5759ea90, 0x5759eac0 } +0x5759ea90->Object::~Object { 0x5759ea00, 0x5759ea30, 0x5759ea60, 0x5759ea90 } +0x5759ea60->Object::~Object { 0x5759ea00, 0x5759ea30, 0x5759ea60 } +0x5759ea30->Object::~Object { 0x5759ea00, 0x5759ea30 } +0x5759ea00->Object::~Object { 0x5759ea00 } exception caught: printer end { } ./c++.at:1361: grep '^exception caught: printer$' stderr -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr exception caught: printer ./c++.at:1361: $PREPARSER ./input aaaae +./c++.at:1360: $PREPARSER ./input aaaaR stderr: -./c++.at:1066: $PREPARSER ./input < in exception caught: syntax error ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -error: invalid expression -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input aaaaE +685. c++.at:1360: ok stderr: exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1066: $PREPARSER ./input < in ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: invalid character -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input aaaaT -684. c++.at:1066: ok stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ./c++.at:1361: $PREPARSER ./input aaaaR stderr: - ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -686. c++.at:1361: ok - -709. cxx-type.at:432: testing GLR: Merge conflicting parses, impure, no locations ... -./cxx-type.at:433: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +======== Testing with C++ standard flags: '' +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS 710. cxx-type.at:438: testing GLR: Merge conflicting parses, impure, locations ... 
./cxx-type.at:439: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y stderr: -types.y:77.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./cxx-type.at:433: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -stderr: -types.y:87.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./cxx-type.at:439: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -stderr: stdout: -./c++.at:859: $PREPARSER ./input +./c++.at:573: $here/modern +stdout: +Modern C++: 202002 +./c++.at:573: $PREPARSER ./list stderr: -./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: ./cxx-type.at:423: $PREPARSER ./types test-input stderr: syntax error ./cxx-type.at:423: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./cxx-type.at:423: $PREPARSER ./types -p test-input stderr: +types.y:87.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 64): @@ -261123,6 +259231,7 @@ Reducing stack 0 by rule 6 (line 77): $1 = token '@' () Cleanup: popping nterm prog () +./cxx-type.at:439: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS ./cxx-type.at:423: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse @@ -261693,1396 +259802,531 @@ Cleanup: popping nterm prog () 707. cxx-type.at:420: ok -711. cxx-type.at:444: testing GLR: Merge conflicting parses, pure, no locations ... -./cxx-type.at:445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:1362: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaal +711. cxx-type.at:444: testing GLR: Merge conflicting parses, pure, no locations ... 
+./c++.at:1555: $PREPARSER ./test +./cxx-type.at:445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y stderr: -exception caught: yylex -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input i +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -exception caught: initial-action +stdout: +======== Testing with C++ standard flags: '' +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: +./c++.at:851: $PREPARSER ./input +stderr: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: types.y:77.8-37: warning: unset value: $$ [-Wother] types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./cxx-type.at:429: $PREPARSER ./types test-input ./cxx-type.at:445: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -17.5: syntax error -./cxx-type.at:429: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaap +stdout: +./c++.at:1555: ./check +./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y stderr: stdout: ./c++.at:1363: $PREPARSER ./input aaaas stderr: -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr exception caught: reduction -./cxx-type.at:429: $PREPARSER ./types -p test-input +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaal stderr: -stdout: -./cxx-type.at:435: $PREPARSER ./types test-input +exception caught: yylex ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input i stderr: -syntax error -./c++.at:1362: $PREPARSER ./input --debug aaaap -./cxx-type.at:435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaal +exception caught: initial-action +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaap +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input --debug aaaap stderr: Starting parse Entering state 0 -Reducing stack 0 by rule 1 (line 71): --> $$ = nterm prog (1.1: ) -Entering state 1 -Reading a token -Next token is token ID (3.0: ) -Shifting token ID (3.0: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (3.0: ) --> $$ = nterm expr (3.0: ) -Entering state 8 -Reading a token -Next token is token '+' (3.2: ) -Shifting token '+' (3.2: ) -Entering state 15 -Reading a token -Next token is token ID (3.4: ) -Shifting token ID (3.4: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (3.4: ) --> $$ = nterm expr (3.4: ) -Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (3.0: ) - $2 = token '+' (3.2: ) - $3 = nterm expr (3.4: ) --> $$ = nterm expr (3.0-4: ) 
-Entering state 8 +Stack now 0 Reading a token -Next token is token ';' (3.5: ) -Shifting token ';' (3.5: ) -Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (3.0-4: ) - $2 = token ';' (3.5: ) --> $$ = nterm stmt (3.0-5: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1: ) - $2 = nterm stmt (3.0-5: ) --> $$ = nterm prog (1.1-3.5: ) +0xffa37ebf->Object::Object { } +0xffa37f68->Object::Object { 0xffa37ebf } +0xffa37ebf->Object::~Object { 0xffa37ebf, 0xffa37f68 } +Next token is token 'a' (0xffa37f68 'a') +0xffa37eb8->Object::Object { 0xffa37f68 } +0xffa37f68->Object::~Object { 0xffa37eb8, 0xffa37f68 } +Shifting token 'a' (0xffa37eb8 'a') +0x566ff3c4->Object::Object { 0xffa37eb8 } +0xffa37eb8->Object::~Object { 0x566ff3c4, 0xffa37eb8 } Entering state 1 +Stack now 0 1 +0xffa37f78->Object::Object { 0x566ff3c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x566ff3c4 'a') +-> $$ = nterm item (0xffa37f78 'a') +0x566ff3c4->Object::~Object { 0x566ff3c4, 0xffa37f78 } +0x566ff3c4->Object::Object { 0xffa37f78 } +0xffa37f78->Object::~Object { 0x566ff3c4, 0xffa37f78 } +Entering state 10 +Stack now 0 10 Reading a token -Next token is token TYPENAME (5.0: ) -Shifting token TYPENAME (5.0: ) -Entering state 4 -Reading a token -Next token is token ID (5.2: ) -Shifting token ID (5.2: ) -Entering state 11 -Reducing stack 0 by rule 13 (line 104): - $1 = token ID (5.2: ) --> $$ = nterm declarator (5.2: ) -Entering state 13 -Reading a token -Next token is token ';' (5.3: ) -Shifting token ';' (5.3: ) -Entering state 23 -Reducing stack 0 by rule 11 (line 97): - $1 = token TYPENAME (5.0: ) - $2 = nterm declarator (5.2: ) - $3 = token ';' (5.3: ) --> $$ = nterm decl (5.0-3: ) -Entering state 9 -Reducing stack 0 by rule 4 (line 85): - $1 = nterm decl (5.0-3: ) --> $$ = nterm stmt (5.0-3: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-3.5: ) - $2 = nterm stmt (5.0-3: ) --> $$ = nterm prog (1.1-5.3: ) +0xffa37ebf->Object::Object { 0x566ff3c4 } +0xffa37f68->Object::Object { 0x566ff3c4, 0xffa37ebf } +0xffa37ebf->Object::~Object { 0x566ff3c4, 0xffa37ebf, 0xffa37f68 } +Next token is token 'a' (0xffa37f68 'a') +0xffa37eb8->Object::Object { 0x566ff3c4, 0xffa37f68 } +0xffa37f68->Object::~Object { 0x566ff3c4, 0xffa37eb8, 0xffa37f68 } +Shifting token 'a' (0xffa37eb8 'a') +0x566ff3d4->Object::Object { 0x566ff3c4, 0xffa37eb8 } +0xffa37eb8->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0xffa37eb8 } Entering state 1 +Stack now 0 10 1 +0xffa37f78->Object::Object { 0x566ff3c4, 0x566ff3d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x566ff3d4 'a') +-> $$ = nterm item (0xffa37f78 'a') +0x566ff3d4->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0xffa37f78 } +0x566ff3d4->Object::Object { 0x566ff3c4, 0xffa37f78 } +0xffa37f78->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0xffa37f78 } +Entering state 10 +Stack now 0 10 10 Reading a token -Next token is token TYPENAME (7.0: ) -Shifting token TYPENAME (7.0: ) -Entering state 4 -Reading a token -Next token is token ID (7.2: ) -Shifting token ID (7.2: ) -Entering state 11 -Reducing stack 0 by rule 13 (line 104): - $1 = token ID (7.2: ) --> $$ = nterm declarator (7.2: ) -Entering state 13 -Reading a token -Next token is token '=' (7.4: ) -Shifting token '=' (7.4: ) -Entering state 22 -Reading a token -Next token is token ID (7.6: ) -Shifting token ID (7.6: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (7.6: ) --> $$ = nterm expr (7.6: ) -Entering state 29 -Reading 
a token -Next token is token ';' (7.7: ) -Shifting token ';' (7.7: ) -Entering state 30 -Reducing stack 0 by rule 12 (line 99): - $1 = token TYPENAME (7.0: ) - $2 = nterm declarator (7.2: ) - $3 = token '=' (7.4: ) - $4 = nterm expr (7.6: ) - $5 = token ';' (7.7: ) --> $$ = nterm decl (7.0-7: ) -Entering state 9 -Reducing stack 0 by rule 4 (line 85): - $1 = nterm decl (7.0-7: ) --> $$ = nterm stmt (7.0-7: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-5.3: ) - $2 = nterm stmt (7.0-7: ) --> $$ = nterm prog (1.1-7.7: ) +0xffa37ebf->Object::Object { 0x566ff3c4, 0x566ff3d4 } +0xffa37f68->Object::Object { 0x566ff3c4, 0x566ff3d4, 0xffa37ebf } +0xffa37ebf->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0xffa37ebf, 0xffa37f68 } +Next token is token 'a' (0xffa37f68 'a') +0xffa37eb8->Object::Object { 0x566ff3c4, 0x566ff3d4, 0xffa37f68 } +0xffa37f68->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0xffa37eb8, 0xffa37f68 } +Shifting token 'a' (0xffa37eb8 'a') +0x566ff3e4->Object::Object { 0x566ff3c4, 0x566ff3d4, 0xffa37eb8 } +0xffa37eb8->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37eb8 } Entering state 1 +Stack now 0 10 10 1 +0xffa37f78->Object::Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x566ff3e4 'a') +-> $$ = nterm item (0xffa37f78 'a') +0x566ff3e4->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37f78 } +0x566ff3e4->Object::Object { 0x566ff3c4, 0x566ff3d4, 0xffa37f78 } +0xffa37f78->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37f78 } +Entering state 10 +Stack now 0 10 10 10 Reading a token -Next token is token ID (9.0: ) -Shifting token ID (9.0: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (9.0: ) --> $$ = nterm expr (9.0: ) -Entering state 8 -Reading a token -Next token is token '=' (9.2: ) -Shifting token '=' (9.2: ) -Entering state 14 -Reading a token -Next token is token ID (9.4: ) -Shifting token ID (9.4: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (9.4: ) --> $$ = nterm expr (9.4: ) -Entering state 24 -Reading a token -Next token is token ';' (9.5: ) -Reducing stack 0 by rule 10 (line 94): - $1 = nterm expr (9.0: ) - $2 = token '=' (9.2: ) - $3 = nterm expr (9.4: ) --> $$ = nterm expr (9.0-4: ) -Entering state 8 -Next token is token ';' (9.5: ) -Shifting token ';' (9.5: ) -Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (9.0-4: ) - $2 = token ';' (9.5: ) --> $$ = nterm stmt (9.0-5: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-7.7: ) - $2 = nterm stmt (9.0-5: ) --> $$ = nterm prog (1.1-9.5: ) +0xffa37ebf->Object::Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4 } +0xffa37f68->Object::Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37ebf } +0xffa37ebf->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37ebf, 0xffa37f68 } +Next token is token 'a' (0xffa37f68 'a') +0xffa37eb8->Object::Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37f68 } +0xffa37f68->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37eb8, 0xffa37f68 } +Shifting token 'a' (0xffa37eb8 'a') +0x566ff3f4->Object::Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37eb8 } +0xffa37eb8->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0x566ff3f4, 0xffa37eb8 } Entering state 1 +Stack now 0 10 10 10 1 +0xffa37f78->Object::Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0x566ff3f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x566ff3f4 'a') +-> $$ = nterm item 
(0xffa37f78 'a') +0x566ff3f4->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0x566ff3f4, 0xffa37f78 } +0x566ff3f4->Object::Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37f78 } +0xffa37f78->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0x566ff3f4, 0xffa37f78 } +Entering state 10 +Stack now 0 10 10 10 10 Reading a token -Next token is token TYPENAME (11.0: ) -Shifting token TYPENAME (11.0: ) -Entering state 4 -Reading a token -Next token is token '(' (11.2: ) -Shifting token '(' (11.2: ) -Entering state 12 -Reading a token -Next token is token ID (11.3: ) -Shifting token ID (11.3: ) -Entering state 18 -Reading a token -Next token is token ')' (11.4: ) -Stack 0 Entering state 18 -Next token is token ')' (11.4: ) -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' (11.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' (11.4: ) -Stack 1 Entering state 21 -Next token is token ')' (11.4: ) -On stack 0, shifting token ')' (11.4: ) -Stack 0 now in state 27 -On stack 1, shifting token ')' (11.4: ) -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token '+' (11.6: ) -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token '+' (11.6: ) -Stack 1 dies. -Removing dead stacks. -On stack 0, shifting token '+' (11.6: ) -Stack 0 now in state 15 -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (11.3: ) --> $$ = nterm expr (11.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (11.0: ) - $2 = token '(' (11.2: ) - $3 = nterm expr (11.3: ) - $4 = token ')' (11.4: ) --> $$ = nterm expr (11.0-4: ) -Returning to deterministic operation. 
-Entering state 15 -Reading a token -Next token is token ID (11.8: ) -Shifting token ID (11.8: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (11.8: ) --> $$ = nterm expr (11.8: ) -Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (11.0-4: ) - $2 = token '+' (11.6: ) - $3 = nterm expr (11.8: ) --> $$ = nterm expr (11.0-8: ) -Entering state 8 +0xffa37ebf->Object::Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0x566ff3f4 } +0xffa37f68->Object::Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0x566ff3f4, 0xffa37ebf } +0xffa37ebf->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0x566ff3f4, 0xffa37ebf, 0xffa37f68 } +Next token is token 'p' (0xffa37f68 'p'Exception caught: cleaning lookahead and stack +0x566ff3f4->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0x566ff3f4, 0xffa37f68 } +0x566ff3e4->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37f68 } +0x566ff3d4->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0xffa37f68 } +0x566ff3c4->Object::~Object { 0x566ff3c4, 0xffa37f68 } +0xffa37f68->Object::~Object { 0xffa37f68 } +exception caught: printer +end { } +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token ';' (11.9: ) -Shifting token ';' (11.9: ) -Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (11.0-8: ) - $2 = token ';' (11.9: ) --> $$ = nterm stmt (11.0-9: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-9.5: ) - $2 = nterm stmt (11.0-9: ) --> $$ = nterm prog (1.1-11.9: ) +0xffa37ebf->Object::Object { } +0xffa37f68->Object::Object { 0xffa37ebf } +0xffa37ebf->Object::~Object { 0xffa37ebf, 0xffa37f68 } +Next token is token 'a' (0xffa37f68 'a') +0xffa37eb8->Object::Object { 0xffa37f68 } +0xffa37f68->Object::~Object { 0xffa37eb8, 0xffa37f68 } +Shifting token 'a' (0xffa37eb8 'a') +0x566ff3c4->Object::Object { 0xffa37eb8 } +0xffa37eb8->Object::~Object { 0x566ff3c4, 0xffa37eb8 } Entering state 1 -Reading a token -Next token is token TYPENAME (13.0: ) -Shifting token TYPENAME (13.0: ) -Entering state 4 -Reading a token -Next token is token '(' (13.2: ) -Shifting token '(' (13.2: ) -Entering state 12 -Reading a token -Next token is token ID (13.3: ) -Shifting token ID (13.3: ) -Entering state 18 -Reading a token -Next token is token ')' (13.4: ) -Stack 0 Entering state 18 -Next token is token ')' (13.4: ) -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' (13.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' (13.4: ) -Stack 1 Entering state 21 -Next token is token ')' (13.4: ) -On stack 0, shifting token ')' (13.4: ) -Stack 0 now in state 27 -On stack 1, shifting token ')' (13.4: ) -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token ';' (13.5: ) -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token ';' (13.5: ) -On stack 0, shifting token ';' (13.5: ) -Stack 0 now in state 16 -On stack 1, shifting token ';' (13.5: ) -Stack 1 now in state 23 -Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. 
-Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. -Stack 0 Entering state 1 -Reading a token -Next token is token TYPENAME (15.0: ) -Stack 1 Entering state 23 -Reduced stack 1 by rule 11 (line 97); action deferred. Now in state 9. -Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. -Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. -Merging stack 1 into stack 0. -Removing dead stacks. -On stack 0, shifting token TYPENAME (15.0: ) -Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 104): - $1 = token ID (13.3: ) --> $$ = nterm declarator (13.3: ) -Reducing stack -1 by rule 14 (line 105): - $1 = token '(' (13.2: ) - $2 = nterm declarator (13.3: ) - $3 = token ')' (13.4: ) --> $$ = nterm declarator (13.2-4: ) -Reducing stack -1 by rule 11 (line 97): - $1 = token TYPENAME (13.0: ) - $2 = nterm declarator (13.2-4: ) - $3 = token ';' (13.5: ) --> $$ = nterm decl (13.0-5: ) -Reducing stack -1 by rule 4 (line 85): - $1 = nterm decl (13.0-5: ) --> $$ = nterm stmt (13.0-5: ) -Reducing stack -1 by rule 2 (line 72): - $1 = nterm prog (1.1-11.9: ) - $2 = nterm stmt (13.0-5: ) --> $$ = nterm prog (1.1-13.5: ) -Returning to deterministic operation. -Entering state 4 -Reading a token -Next token is token '(' (15.2: ) -Shifting token '(' (15.2: ) -Entering state 12 -Reading a token -Next token is token ID (15.3: ) -Shifting token ID (15.3: ) -Entering state 18 -Reading a token -Next token is token ')' (15.4: ) -Stack 0 Entering state 18 -Next token is token ')' (15.4: ) -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' (15.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' (15.4: ) -Stack 1 Entering state 21 -Next token is token ')' (15.4: ) -On stack 0, shifting token ')' (15.4: ) -Stack 0 now in state 27 -On stack 1, shifting token ')' (15.4: ) -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token '=' (15.6: ) -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token '=' (15.6: ) -On stack 0, shifting token '=' (15.6: ) -Stack 0 now in state 14 -On stack 1, shifting token '=' (15.6: ) -Stack 1 now in state 22 -Stack 0 Entering state 14 -Reading a token -Next token is token ID (15.8: ) -Stack 1 Entering state 22 -Next token is token ID (15.8: ) -On stack 0, shifting token ID (15.8: ) -Stack 0 now in state 5 -On stack 1, shifting token ID (15.8: ) -Stack 1 now in state 5 -Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 24. -Stack 0 Entering state 24 -Reading a token -Next token is token '+' (15.10: ) -Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 29. 
-Stack 1 Entering state 29 -Next token is token '+' (15.10: ) -On stack 0, shifting token '+' (15.10: ) -Stack 0 now in state 15 -On stack 1, shifting token '+' (15.10: ) -Stack 1 now in state 15 -Stack 0 Entering state 15 -Reading a token -Next token is token ID (15.12: ) -Stack 1 Entering state 15 -Next token is token ID (15.12: ) -On stack 0, shifting token ID (15.12: ) -Stack 0 now in state 5 -On stack 1, shifting token ID (15.12: ) -Stack 1 now in state 5 -Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 25. -Stack 0 Entering state 25 -Reduced stack 0 by rule 9 (line 93); action deferred. Now in state 24. -Stack 0 Entering state 24 -Reading a token -Next token is token ';' (15.13: ) -Reduced stack 0 by rule 10 (line 94); action deferred. Now in state 8. -Stack 0 Entering state 8 -Next token is token ';' (15.13: ) -Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 25. -Stack 1 Entering state 25 -Reduced stack 1 by rule 9 (line 93); action deferred. Now in state 29. -Stack 1 Entering state 29 -Next token is token ';' (15.13: ) -On stack 0, shifting token ';' (15.13: ) -Stack 0 now in state 16 -On stack 1, shifting token ';' (15.13: ) -Stack 1 now in state 30 -Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. -Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. -Stack 0 Entering state 1 -Reading a token -Next token is token TYPENAME (17.0: ) -Stack 1 Entering state 30 -Reduced stack 1 by rule 12 (line 99); action deferred. Now in state 9. -Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. -Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. -Merging stack 1 into stack 0. -Removing dead stacks. -On stack 0, shifting token TYPENAME (17.0: ) -Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 104): - $1 = token ID (15.3: ) --> $$ = nterm declarator (15.3: ) -Reducing stack -1 by rule 14 (line 105): - $1 = token '(' (15.2: ) - $2 = nterm declarator (15.3: ) - $3 = token ')' (15.4: ) --> $$ = nterm declarator (15.2-4: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.8: ) --> $$ = nterm expr (15.8: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.12: ) --> $$ = nterm expr (15.12: ) -Reducing stack -1 by rule 9 (line 93): - $1 = nterm expr (15.8: ) - $2 = token '+' (15.10: ) - $3 = nterm expr (15.12: ) --> $$ = nterm expr (15.8-12: ) -Reducing stack -1 by rule 12 (line 99): - $1 = token TYPENAME (15.0: ) - $2 = nterm declarator (15.2-4: ) - $3 = token '=' (15.6: ) - $4 = nterm expr (15.8-12: ) - $5 = token ';' (15.13: ) --> $$ = nterm decl (15.0-13: ) -Reducing stack -1 by rule 4 (line 85): - $1 = nterm decl (15.0-13: ) --> $$ = nterm stmt (15.0-13: ) -Reducing stack -1 by rule 2 (line 72): - $1 = nterm prog (1.1-13.5: ) - $2 = nterm stmt (15.0-13: ) --> $$ = nterm prog (1.1-15.13: ) -Returning to deterministic operation. 
-Entering state 4 -Reading a token -Next token is token '(' (17.2: ) -Shifting token '(' (17.2: ) -Entering state 12 -Reading a token -Next token is token ID (17.3: ) -Shifting token ID (17.3: ) -Entering state 18 -Reading a token -Next token is token ID (17.5: ) -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (17.3: ) --> $$ = nterm expr (17.3: ) -Entering state 20 -Next token is token ID (17.5: ) -17.5: syntax error -Error: popping nterm expr (17.3: ) -Error: popping token '(' (17.2: ) -Error: popping token TYPENAME (17.0: ) -Shifting token error (17.0-5: ) -Entering state 3 -Next token is token ID (17.5: ) -Error: discarding token ID (17.5: ) -Reading a token -Next token is token ')' (17.6: ) -Error: discarding token ')' (17.6: ) -Reading a token -Next token is token '=' (17.8: ) -Error: discarding token '=' (17.8: ) -Reading a token -Next token is token ID (17.10: ) -Error: discarding token ID (17.10: ) -Reading a token -Next token is token '+' (17.12: ) -Error: discarding token '+' (17.12: ) -Reading a token -Next token is token ID (17.14: ) -Error: discarding token ID (17.14: ) -Reading a token -Next token is token ';' (17.15: ) -Entering state 3 -Next token is token ';' (17.15: ) -Shifting token ';' (17.15: ) +Stack now 0 1 +0xffa37f78->Object::Object { 0x566ff3c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x566ff3c4 'a') +-> $$ = nterm item (0xffa37f78 'a') +0x566ff3c4->Object::~Object { 0x566ff3c4, 0xffa37f78 } +0x566ff3c4->Object::Object { 0xffa37f78 } +0xffa37f78->Object::~Object { 0x566ff3c4, 0xffa37f78 } Entering state 10 -Reducing stack 0 by rule 5 (line 86): - $1 = token error (17.0-14: ) - $2 = token ';' (17.15: ) --> $$ = nterm stmt (17.0-15: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-15.13: ) - $2 = nterm stmt (17.0-15: ) --> $$ = nterm prog (1.1-17.15: ) -Entering state 1 -Reading a token -Next token is token ID (19.0: ) -Shifting token ID (19.0: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (19.0: ) --> $$ = nterm expr (19.0: ) -Entering state 8 +Stack now 0 10 Reading a token -Next token is token '+' (19.2: ) -Shifting token '+' (19.2: ) -Entering state 15 +0xffa37ebf->Object::Object { 0x566ff3c4 } +0xffa37f68->Object::Object { 0x566ff3c4, 0xffa37ebf } +0xffa37ebf->Object::~Object { 0x566ff3c4, 0xffa37ebf, 0xffa37f68 } +Next token is token 'a' (0xffa37f68 'a') +0xffa37eb8->Object::Object { 0x566ff3c4, 0xffa37f68 } +0xffa37f68->Object::~Object { 0x566ff3c4, 0xffa37eb8, 0xffa37f68 } +Shifting token 'a' (0xffa37eb8 'a') +0x566ff3d4->Object::Object { 0x566ff3c4, 0xffa37eb8 } +0xffa37eb8->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0xffa37eb8 } +Entering state 1 +Stack now 0 10 1 +0xffa37f78->Object::Object { 0x566ff3c4, 0x566ff3d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x566ff3d4 'a') +-> $$ = nterm item (0xffa37f78 'a') +0x566ff3d4->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0xffa37f78 } +0x566ff3d4->Object::Object { 0x566ff3c4, 0xffa37f78 } +0xffa37f78->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0xffa37f78 } +Entering state 10 +Stack now 0 10 10 Reading a token -Next token is token ID (19.4: ) -Shifting token ID (19.4: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (19.4: ) --> $$ = nterm expr (19.4: ) -Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (19.0: ) - $2 = token '+' (19.2: ) - $3 = nterm expr (19.4: ) --> $$ = nterm expr (19.0-4: ) -Entering state 8 +0xffa37ebf->Object::Object { 0x566ff3c4, 0x566ff3d4 } 
+0xffa37f68->Object::Object { 0x566ff3c4, 0x566ff3d4, 0xffa37ebf } +0xffa37ebf->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0xffa37ebf, 0xffa37f68 } +Next token is token 'a' (0xffa37f68 'a') +0xffa37eb8->Object::Object { 0x566ff3c4, 0x566ff3d4, 0xffa37f68 } +0xffa37f68->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0xffa37eb8, 0xffa37f68 } +Shifting token 'a' (0xffa37eb8 'a') +0x566ff3e4->Object::Object { 0x566ff3c4, 0x566ff3d4, 0xffa37eb8 } +0xffa37eb8->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37eb8 } +Entering state 1 +Stack now 0 10 10 1 +0xffa37f78->Object::Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x566ff3e4 'a') +-> $$ = nterm item (0xffa37f78 'a') +0x566ff3e4->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37f78 } +0x566ff3e4->Object::Object { 0x566ff3c4, 0x566ff3d4, 0xffa37f78 } +0xffa37f78->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37f78 } +Entering state 10 +Stack now 0 10 10 10 Reading a token -Next token is token ';' (19.5: ) -Shifting token ';' (19.5: ) -Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (19.0-4: ) - $2 = token ';' (19.5: ) --> $$ = nterm stmt (19.0-5: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-17.15: ) - $2 = nterm stmt (19.0-5: ) --> $$ = nterm prog (1.1-19.5: ) +0xffa37ebf->Object::Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4 } +0xffa37f68->Object::Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37ebf } +0xffa37ebf->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37ebf, 0xffa37f68 } +Next token is token 'a' (0xffa37f68 'a') +0xffa37eb8->Object::Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37f68 } +0xffa37f68->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37eb8, 0xffa37f68 } +Shifting token 'a' (0xffa37eb8 'a') +0x566ff3f4->Object::Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37eb8 } +0xffa37eb8->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0x566ff3f4, 0xffa37eb8 } Entering state 1 +Stack now 0 10 10 10 1 +0xffa37f78->Object::Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0x566ff3f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x566ff3f4 'a') +-> $$ = nterm item (0xffa37f78 'a') +0x566ff3f4->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0x566ff3f4, 0xffa37f78 } +0x566ff3f4->Object::Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37f78 } +0xffa37f78->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0x566ff3f4, 0xffa37f78 } +Entering state 10 +Stack now 0 10 10 10 10 Reading a token -Next token is token '@' (21.0: ) -Shifting token '@' (21.0: ) -Entering state 6 -Reducing stack 0 by rule 6 (line 87): - $1 = token '@' (21.0: ) -Cleanup: popping nterm prog (1.1-19.5: ) +0xffa37ebf->Object::Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0x566ff3f4 } +0xffa37f68->Object::Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0x566ff3f4, 0xffa37ebf } +0xffa37ebf->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0x566ff3f4, 0xffa37ebf, 0xffa37f68 } +Next token is token 'p' (0xffa37f68 'p'Exception caught: cleaning lookahead and stack +0x566ff3f4->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0x566ff3f4, 0xffa37f68 } +0x566ff3e4->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0x566ff3e4, 0xffa37f68 } +0x566ff3d4->Object::~Object { 0x566ff3c4, 0x566ff3d4, 0xffa37f68 } +0x566ff3c4->Object::~Object { 0x566ff3c4, 0xffa37f68 } +0xffa37f68->Object::~Object { 0xffa37f68 } +exception caught: printer +end { } +./c++.at:1363: grep '^exception caught: 
printer$' stderr +stdout: +exception caught: printer +./c++.at:1363: $PREPARSER ./input aaaae +stderr: +exception caught: syntax error +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1363: $PREPARSER ./input aaaaE +stdout: +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaT +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaR +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:1362: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaap +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input --debug aaaap stderr: -./cxx-type.at:429: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./cxx-type.at:435: $PREPARSER ./types -p test-input Starting parse Entering state 0 Stack now 0 Reading a token -0xffb55f8f->Object::Object { } -0xffb56038->Object::Object { 0xffb55f8f } -0xffb55f8f->Object::~Object { 0xffb55f8f, 0xffb56038 } -Next token is token 'a' (0xffb56038 'a') -0xffb55f88->Object::Object { 0xffb56038 } -0xffb56038->Object::~Object { 0xffb55f88, 0xffb56038 } -Shifting token 'a' (0xffb55f88 'a') -0x56af03c4->Object::Object { 0xffb55f88 } -0xffb55f88->Object::~Object { 0x56af03c4, 0xffb55f88 } +0xffdecd3f->Object::Object { } +0xffdecde8->Object::Object { 0xffdecd3f } +0xffdecd3f->Object::~Object { 0xffdecd3f, 0xffdecde8 } +Next token is token 'a' (0xffdecde8 'a') +0xffdecd38->Object::Object { 0xffdecde8 } +0xffdecde8->Object::~Object { 0xffdecd38, 0xffdecde8 } +Shifting token 'a' (0xffdecd38 'a') +0x57fce3c4->Object::Object { 0xffdecd38 } +0xffdecd38->Object::~Object { 0x57fce3c4, 0xffdecd38 } Entering state 2 Stack now 0 2 -0xffb56048->Object::Object { 0x56af03c4 } +0xffdecdf8->Object::Object { 0x57fce3c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56af03c4 'a') --> $$ = nterm item (0xffb56048 'a') -0x56af03c4->Object::~Object { 0x56af03c4, 0xffb56048 } -0x56af03c4->Object::Object { 0xffb56048 } -0xffb56048->Object::~Object { 0x56af03c4, 0xffb56048 } + $1 = token 'a' (0x57fce3c4 'a') +-> $$ = nterm item (0xffdecdf8 'a') +0x57fce3c4->Object::~Object { 0x57fce3c4, 0xffdecdf8 } +0x57fce3c4->Object::Object { 0xffdecdf8 } +0xffdecdf8->Object::~Object { 0x57fce3c4, 0xffdecdf8 } Entering state 11 Stack now 0 11 Reading a token -0xffb55f8f->Object::Object { 0x56af03c4 } -0xffb56038->Object::Object { 0x56af03c4, 0xffb55f8f } -0xffb55f8f->Object::~Object { 0x56af03c4, 0xffb55f8f, 0xffb56038 } -Next token is token 'a' (0xffb56038 'a') 
-0xffb55f88->Object::Object { 0x56af03c4, 0xffb56038 } -0xffb56038->Object::~Object { 0x56af03c4, 0xffb55f88, 0xffb56038 } -Shifting token 'a' (0xffb55f88 'a') -0x56af03d4->Object::Object { 0x56af03c4, 0xffb55f88 } -0xffb55f88->Object::~Object { 0x56af03c4, 0x56af03d4, 0xffb55f88 } +0xffdecd3f->Object::Object { 0x57fce3c4 } +0xffdecde8->Object::Object { 0x57fce3c4, 0xffdecd3f } +0xffdecd3f->Object::~Object { 0x57fce3c4, 0xffdecd3f, 0xffdecde8 } +Next token is token 'a' (0xffdecde8 'a') +0xffdecd38->Object::Object { 0x57fce3c4, 0xffdecde8 } +0xffdecde8->Object::~Object { 0x57fce3c4, 0xffdecd38, 0xffdecde8 } +Shifting token 'a' (0xffdecd38 'a') +0x57fce3d4->Object::Object { 0x57fce3c4, 0xffdecd38 } +0xffdecd38->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0xffdecd38 } Entering state 2 Stack now 0 11 2 -0xffb56048->Object::Object { 0x56af03c4, 0x56af03d4 } +0xffdecdf8->Object::Object { 0x57fce3c4, 0x57fce3d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56af03d4 'a') --> $$ = nterm item (0xffb56048 'a') -0x56af03d4->Object::~Object { 0x56af03c4, 0x56af03d4, 0xffb56048 } -0x56af03d4->Object::Object { 0x56af03c4, 0xffb56048 } -0xffb56048->Object::~Object { 0x56af03c4, 0x56af03d4, 0xffb56048 } + $1 = token 'a' (0x57fce3d4 'a') +-> $$ = nterm item (0xffdecdf8 'a') +0x57fce3d4->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0xffdecdf8 } +0x57fce3d4->Object::Object { 0x57fce3c4, 0xffdecdf8 } +0xffdecdf8->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0xffdecdf8 } Entering state 11 Stack now 0 11 11 Reading a token -0xffb55f8f->Object::Object { 0x56af03c4, 0x56af03d4 } -0xffb56038->Object::Object { 0x56af03c4, 0x56af03d4, 0xffb55f8f } -0xffb55f8f->Object::~Object { 0x56af03c4, 0x56af03d4, 0xffb55f8f, 0xffb56038 } -Next token is token 'a' (0xffb56038 'a') -0xffb55f88->Object::Object { 0x56af03c4, 0x56af03d4, 0xffb56038 } -0xffb56038->Object::~Object { 0x56af03c4, 0x56af03d4, 0xffb55f88, 0xffb56038 } -Shifting token 'a' (0xffb55f88 'a') -0x56af03e4->Object::Object { 0x56af03c4, 0x56af03d4, 0xffb55f88 } -0xffb55f88->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0xffb55f88 } +0xffdecd3f->Object::Object { 0x57fce3c4, 0x57fce3d4 } +0xffdecde8->Object::Object { 0x57fce3c4, 0x57fce3d4, 0xffdecd3f } +0xffdecd3f->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0xffdecd3f, 0xffdecde8 } +Next token is token 'a' (0xffdecde8 'a') +0xffdecd38->Object::Object { 0x57fce3c4, 0x57fce3d4, 0xffdecde8 } +0xffdecde8->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0xffdecd38, 0xffdecde8 } +Shifting token 'a' (0xffdecd38 'a') +0x57fce3e4->Object::Object { 0x57fce3c4, 0x57fce3d4, 0xffdecd38 } +0xffdecd38->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecd38 } Entering state 2 Stack now 0 11 11 2 -0xffb56048->Object::Object { 0x56af03c4, 0x56af03d4, 0x56af03e4 } +0xffdecdf8->Object::Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56af03e4 'a') --> $$ = nterm item (0xffb56048 'a') -0x56af03e4->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0xffb56048 } -0x56af03e4->Object::Object { 0x56af03c4, 0x56af03d4, 0xffb56048 } -0xffb56048->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0xffb56048 } + $1 = token 'a' (0x57fce3e4 'a') +-> $$ = nterm item (0xffdecdf8 'a') +0x57fce3e4->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecdf8 } +0x57fce3e4->Object::Object { 0x57fce3c4, 0x57fce3d4, 0xffdecdf8 } +0xffdecdf8->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecdf8 } Entering state 11 Stack now 0 11 11 11 Reading a token 
-0xffb55f8f->Object::Object { 0x56af03c4, 0x56af03d4, 0x56af03e4 } -0xffb56038->Object::Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0xffb55f8f } -0xffb55f8f->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0xffb55f8f, 0xffb56038 } -Next token is token 'a' (0xffb56038 'a') -0xffb55f88->Object::Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0xffb56038 } -0xffb56038->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0xffb55f88, 0xffb56038 } -Shifting token 'a' (0xffb55f88 'a') -0x56af03f4->Object::Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0xffb55f88 } -0xffb55f88->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0x56af03f4, 0xffb55f88 } +0xffdecd3f->Object::Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4 } +0xffdecde8->Object::Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecd3f } +0xffdecd3f->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecd3f, 0xffdecde8 } +Next token is token 'a' (0xffdecde8 'a') +0xffdecd38->Object::Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecde8 } +0xffdecde8->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecd38, 0xffdecde8 } +Shifting token 'a' (0xffdecd38 'a') +0x57fce3f4->Object::Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecd38 } +0xffdecd38->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0x57fce3f4, 0xffdecd38 } Entering state 2 Stack now 0 11 11 11 2 -0xffb56048->Object::Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0x56af03f4 } +0xffdecdf8->Object::Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0x57fce3f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56af03f4 'a') --> $$ = nterm item (0xffb56048 'a') -0x56af03f4->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0x56af03f4, 0xffb56048 } -0x56af03f4->Object::Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0xffb56048 } -0xffb56048->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0x56af03f4, 0xffb56048 } + $1 = token 'a' (0x57fce3f4 'a') +-> $$ = nterm item (0xffdecdf8 'a') +0x57fce3f4->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0x57fce3f4, 0xffdecdf8 } +0x57fce3f4->Object::Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecdf8 } +0xffdecdf8->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0x57fce3f4, 0xffdecdf8 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0xffb55f8f->Object::Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0x56af03f4 } -0xffb56038->Object::Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0x56af03f4, 0xffb55f8f } -0xffb55f8f->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0x56af03f4, 0xffb55f8f, 0xffb56038 } -Next token is token 'p' (0xffb56038 'p'Exception caught: cleaning lookahead and stack -0x56af03f4->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0x56af03f4, 0xffb56038 } -0x56af03e4->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0xffb56038 } -0x56af03d4->Object::~Object { 0x56af03c4, 0x56af03d4, 0xffb56038 } -0x56af03c4->Object::~Object { 0x56af03c4, 0xffb56038 } -0xffb56038->Object::~Object { 0xffb56038 } +0xffdecd3f->Object::Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0x57fce3f4 } +0xffdecde8->Object::Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0x57fce3f4, 0xffdecd3f } +0xffdecd3f->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0x57fce3f4, 0xffdecd3f, 0xffdecde8 } +Next token is token 'p' (0xffdecde8 'p'Exception caught: cleaning lookahead and stack +0x57fce3f4->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0x57fce3f4, 0xffdecde8 } +0x57fce3e4->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecde8 } +0x57fce3d4->Object::~Object { 
0x57fce3c4, 0x57fce3d4, 0xffdecde8 } +0x57fce3c4->Object::~Object { 0x57fce3c4, 0xffdecde8 } +0xffdecde8->Object::~Object { 0xffdecde8 } exception caught: printer end { } ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -exception caught: yylex -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reducing stack 0 by rule 1 (line 71): --> $$ = nterm prog (1.1: ) -Entering state 1 -Reading a token -Next token is token ID (3.0: ) -Shifting token ID (3.0: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (3.0: ) --> $$ = nterm expr (3.0: ) -Entering state 8 -Reading a token -Next token is token '+' (3.2: ) -Shifting token '+' (3.2: ) -Entering state 15 -Reading a token -Next token is token ID (3.4: ) -Shifting token ID (3.4: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (3.4: ) --> $$ = nterm expr (3.4: ) -Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (3.0: ) - $2 = token '+' (3.2: ) - $3 = nterm expr (3.4: ) --> $$ = nterm expr (3.0-4: ) -Entering state 8 -Reading a token -Next token is token ';' (3.5: ) -Shifting token ';' (3.5: ) -Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (3.0-4: ) - $2 = token ';' (3.5: ) --> $$ = nterm stmt (3.0-5: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1: ) - $2 = nterm stmt (3.0-5: ) --> $$ = nterm prog (1.1-3.5: ) -Entering state 1 -Reading a token -Next token is token TYPENAME (5.0: ) -Shifting token TYPENAME (5.0: ) -Entering state 4 -Reading a token -Next token is token ID (5.2: ) -Shifting token ID (5.2: ) -Entering state 11 -Reducing stack 0 by rule 13 (line 104): - $1 = token ID (5.2: ) --> $$ = nterm declarator (5.2: ) -Entering state 13 -Reading a token -Next token is token ';' (5.3: ) -Shifting token ';' (5.3: ) -Entering state 23 -Reducing stack 0 by rule 11 (line 97): - $1 = token TYPENAME (5.0: ) - $2 = nterm declarator (5.2: ) - $3 = token ';' (5.3: ) --> $$ = nterm decl (5.0-3: ) -Entering state 9 -Reducing stack 0 by rule 4 (line 85): - $1 = nterm decl (5.0-3: ) --> $$ = nterm stmt (5.0-3: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-3.5: ) - $2 = nterm stmt (5.0-3: ) --> $$ = nterm prog (1.1-5.3: ) -Entering state 1 -Reading a token -Next token is token TYPENAME (7.0: ) -Shifting token TYPENAME (7.0: ) -Entering state 4 -Reading a token -Next token is token ID (7.2: ) -Shifting token ID (7.2: ) -Entering state 11 -Reducing stack 0 by rule 13 (line 104): - $1 = token ID (7.2: ) --> $$ = nterm declarator (7.2: ) -Entering state 13 -Reading a token -Next token is token '=' (7.4: ) -Shifting token '=' (7.4: ) -Entering state 22 -Reading a token -Next token is token ID (7.6: ) -Shifting token ID (7.6: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (7.6: ) --> $$ = nterm expr (7.6: ) -Entering state 29 -Reading a token -Next token is token ';' (7.7: ) -Shifting token ';' (7.7: ) -Entering state 30 -Reducing stack 0 by rule 12 (line 99): - $1 = token TYPENAME (7.0: ) - $2 = nterm declarator (7.2: ) - $3 = token '=' (7.4: ) - $4 = nterm expr (7.6: ) - $5 = token ';' (7.7: ) --> $$ = nterm decl (7.0-7: ) -Entering state 9 -Reducing stack 0 by rule 4 (line 85): - $1 = nterm decl (7.0-7: ) --> $$ = nterm stmt (7.0-7: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-5.3: ) - $2 = nterm stmt (7.0-7: ) --> 
$$ = nterm prog (1.1-7.7: ) -Entering state 1 -Reading a token -Next token is token ID (9.0: ) -Shifting token ID (9.0: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (9.0: ) --> $$ = nterm expr (9.0: ) -Entering state 8 -Reading a token -Next token is token '=' (9.2: ) -Shifting token '=' (9.2: ) -Entering state 14 -Reading a token -Next token is token ID (9.4: ) -Shifting token ID (9.4: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (9.4: ) --> $$ = nterm expr (9.4: ) -Entering state 24 -Reading a token -Next token is token ';' (9.5: ) -Reducing stack 0 by rule 10 (line 94): - $1 = nterm expr (9.0: ) - $2 = token '=' (9.2: ) - $3 = nterm expr (9.4: ) --> $$ = nterm expr (9.0-4: ) -Entering state 8 -Next token is token ';' (9.5: ) -Shifting token ';' (9.5: ) -Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (9.0-4: ) - $2 = token ';' (9.5: ) --> $$ = nterm stmt (9.0-5: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-7.7: ) - $2 = nterm stmt (9.0-5: ) --> $$ = nterm prog (1.1-9.5: ) -Entering state 1 -Reading a token -Next token is token TYPENAME (11.0: ) -Shifting token TYPENAME (11.0: ) -Entering state 4 -Reading a token -Next token is token '(' (11.2: ) -Shifting token '(' (11.2: ) -Entering state 12 -Reading a token -Next token is token ID (11.3: ) -Shifting token ID (11.3: ) -Entering state 18 -Reading a token -Next token is token ')' (11.4: ) -Stack 0 Entering state 18 -Next token is token ')' (11.4: ) -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' (11.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' (11.4: ) -Stack 1 Entering state 21 -Next token is token ')' (11.4: ) -On stack 0, shifting token ')' (11.4: ) -Stack 0 now in state 27 -On stack 1, shifting token ')' (11.4: ) -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token '+' (11.6: ) -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token '+' (11.6: ) -Stack 1 dies. -Removing dead stacks. -On stack 0, shifting token '+' (11.6: ) -Stack 0 now in state 15 -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (11.3: ) --> $$ = nterm expr (11.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (11.0: ) - $2 = token '(' (11.2: ) - $3 = nterm expr (11.3: ) - $4 = token ')' (11.4: ) --> $$ = nterm expr (11.0-4: ) -Returning to deterministic operation. 
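The removed hunks around this point are Bison's GLR debug trace from the cxx-type.at tests: at the ambiguous statement "T (x);" the parser splits off a second stack ("Splitting off stack 1 from 0"), defers the semantic actions of both analyses, and later either lets one stack die or merges them ("Merging stack 1 into stack 0") before "Returning to deterministic operation". The C++ sketch below only illustrates that idea under assumed names (Interp, the hard-coded rule strings); it is not the generated parser.

#include <iostream>
#include <string>
#include <vector>

// One deferred analysis of the ambiguous input "T (x);" (hypothetical type).
struct Interp {
    std::string kind;   // "decl" or "expr"
    std::string text;   // what the deferred semantic action would build
};

int main() {
    // "Splitting off stack 1 from 0": keep both candidate analyses
    // instead of committing to a single shift/reduce decision.
    std::vector<Interp> stacks = {
        {"decl", "declare x with type T"},     // decl -> TYPENAME declarator ';'
        {"expr", "cast/call expression T(x)"}  // expr -> TYPENAME '(' expr ')'
    };

    // "action deferred": nothing user-visible has run yet, only record-keeping.
    for (const Interp& s : stacks)
        std::cout << "deferred analysis: " << s.kind << " = " << s.text << '\n';

    // "Merging stack 1 into stack 0": some merge policy picks one result;
    // this sketch arbitrarily prefers the declaration reading.
    std::cout << "merged result: " << stacks[0].text << '\n';

    // "Returning to deterministic operation."
    return 0;
}

In the real trace the split is resolved either by one stack dying (for instance when the '+' after "T (x)" rules out the declarator reading and the log prints "Stack 1 dies.") or by merging the surviving analyses, after which the deferred reductions are replayed as the "Reducing stack -1 by rule ..." lines show.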
-Entering state 15 -Reading a token -Next token is token ID (11.8: ) -Shifting token ID (11.8: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (11.8: ) --> $$ = nterm expr (11.8: ) -Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (11.0-4: ) - $2 = token '+' (11.6: ) - $3 = nterm expr (11.8: ) --> $$ = nterm expr (11.0-8: ) -Entering state 8 -Reading a token -Next token is token ';' (11.9: ) -Shifting token ';' (11.9: ) -Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (11.0-8: ) - $2 = token ';' (11.9: ) --> $$ = nterm stmt (11.0-9: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-9.5: ) - $2 = nterm stmt (11.0-9: ) --> $$ = nterm prog (1.1-11.9: ) -Entering state 1 -Reading a token -Next token is token TYPENAME (13.0: ) -Shifting token TYPENAME (13.0: ) -Entering state 4 -Reading a token -Next token is token '(' (13.2: ) -Shifting token '(' (13.2: ) -Entering state 12 -Reading a token -Next token is token ID (13.3: ) -Shifting token ID (13.3: ) -Entering state 18 -Reading a token -Next token is token ')' (13.4: ) -Stack 0 Entering state 18 -Next token is token ')' (13.4: ) -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' (13.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' (13.4: ) -Stack 1 Entering state 21 -Next token is token ')' (13.4: ) -On stack 0, shifting token ')' (13.4: ) -Stack 0 now in state 27 -On stack 1, shifting token ')' (13.4: ) -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token ';' (13.5: ) -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token ';' (13.5: ) -On stack 0, shifting token ';' (13.5: ) -Stack 0 now in state 16 -On stack 1, shifting token ';' (13.5: ) -Stack 1 now in state 23 -Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. -Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. -Stack 0 Entering state 1 -Reading a token -Next token is token TYPENAME (15.0: ) -Stack 1 Entering state 23 -Reduced stack 1 by rule 11 (line 97); action deferred. Now in state 9. -Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. -Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. -Merging stack 1 into stack 0. -Removing dead stacks. 
-On stack 0, shifting token TYPENAME (15.0: ) -Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 104): - $1 = token ID (13.3: ) --> $$ = nterm declarator (13.3: ) -Reducing stack -1 by rule 14 (line 105): - $1 = token '(' (13.2: ) - $2 = nterm declarator (13.3: ) - $3 = token ')' (13.4: ) --> $$ = nterm declarator (13.2-4: ) -Reducing stack -1 by rule 11 (line 97): - $1 = token TYPENAME (13.0: ) - $2 = nterm declarator (13.2-4: ) - $3 = token ';' (13.5: ) --> $$ = nterm decl (13.0-5: ) -Reducing stack -1 by rule 4 (line 85): - $1 = nterm decl (13.0-5: ) --> $$ = nterm stmt (13.0-5: ) -Reducing stack -1 by rule 2 (line 72): - $1 = nterm prog (1.1-11.9: ) - $2 = nterm stmt (13.0-5: ) --> $$ = nterm prog (1.1-13.5: ) -Returning to deterministic operation. -Entering state 4 -Reading a token -Next token is token '(' (15.2: ) -Shifting token '(' (15.2: ) -Entering state 12 -Reading a token -Next token is token ID (15.3: ) -Shifting token ID (15.3: ) -Entering state 18 -Reading a token -Next token is token ')' (15.4: ) -Stack 0 Entering state 18 -Next token is token ')' (15.4: ) -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' (15.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' (15.4: ) -Stack 1 Entering state 21 -Next token is token ')' (15.4: ) -On stack 0, shifting token ')' (15.4: ) -Stack 0 now in state 27 -On stack 1, shifting token ')' (15.4: ) -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token '=' (15.6: ) -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token '=' (15.6: ) -On stack 0, shifting token '=' (15.6: ) -Stack 0 now in state 14 -On stack 1, shifting token '=' (15.6: ) -Stack 1 now in state 22 -Stack 0 Entering state 14 -Reading a token -Next token is token ID (15.8: ) -Stack 1 Entering state 22 -Next token is token ID (15.8: ) -On stack 0, shifting token ID (15.8: ) -Stack 0 now in state 5 -On stack 1, shifting token ID (15.8: ) -Stack 1 now in state 5 -Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 24. -Stack 0 Entering state 24 -Reading a token -Next token is token '+' (15.10: ) -Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 29. -Stack 1 Entering state 29 -Next token is token '+' (15.10: ) -On stack 0, shifting token '+' (15.10: ) -Stack 0 now in state 15 -On stack 1, shifting token '+' (15.10: ) -Stack 1 now in state 15 -Stack 0 Entering state 15 -Reading a token -Next token is token ID (15.12: ) -Stack 1 Entering state 15 -Next token is token ID (15.12: ) -On stack 0, shifting token ID (15.12: ) -Stack 0 now in state 5 -On stack 1, shifting token ID (15.12: ) -Stack 1 now in state 5 -Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 25. -Stack 0 Entering state 25 -Reduced stack 0 by rule 9 (line 93); action deferred. Now in state 24. -Stack 0 Entering state 24 -Reading a token -Next token is token ';' (15.13: ) -Reduced stack 0 by rule 10 (line 94); action deferred. Now in state 8. -Stack 0 Entering state 8 -Next token is token ';' (15.13: ) -Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 90); action deferred. 
Now in state 25. -Stack 1 Entering state 25 -Reduced stack 1 by rule 9 (line 93); action deferred. Now in state 29. -Stack 1 Entering state 29 -Next token is token ';' (15.13: ) -On stack 0, shifting token ';' (15.13: ) -Stack 0 now in state 16 -On stack 1, shifting token ';' (15.13: ) -Stack 1 now in state 30 -Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. -Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. -Stack 0 Entering state 1 -Reading a token -Next token is token TYPENAME (17.0: ) -Stack 1 Entering state 30 -Reduced stack 1 by rule 12 (line 99); action deferred. Now in state 9. -Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. -Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. -Merging stack 1 into stack 0. -Removing dead stacks. -On stack 0, shifting token TYPENAME (17.0: ) -Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 104): - $1 = token ID (15.3: ) --> $$ = nterm declarator (15.3: ) -Reducing stack -1 by rule 14 (line 105): - $1 = token '(' (15.2: ) - $2 = nterm declarator (15.3: ) - $3 = token ')' (15.4: ) --> $$ = nterm declarator (15.2-4: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.8: ) --> $$ = nterm expr (15.8: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.12: ) --> $$ = nterm expr (15.12: ) -Reducing stack -1 by rule 9 (line 93): - $1 = nterm expr (15.8: ) - $2 = token '+' (15.10: ) - $3 = nterm expr (15.12: ) --> $$ = nterm expr (15.8-12: ) -Reducing stack -1 by rule 12 (line 99): - $1 = token TYPENAME (15.0: ) - $2 = nterm declarator (15.2-4: ) - $3 = token '=' (15.6: ) - $4 = nterm expr (15.8-12: ) - $5 = token ';' (15.13: ) --> $$ = nterm decl (15.0-13: ) -Reducing stack -1 by rule 4 (line 85): - $1 = nterm decl (15.0-13: ) --> $$ = nterm stmt (15.0-13: ) -Reducing stack -1 by rule 2 (line 72): - $1 = nterm prog (1.1-13.5: ) - $2 = nterm stmt (15.0-13: ) --> $$ = nterm prog (1.1-15.13: ) -Returning to deterministic operation. 
-Entering state 4 -Reading a token -Next token is token '(' (17.2: ) -Shifting token '(' (17.2: ) -Entering state 12 -Reading a token -Next token is token ID (17.3: ) -Shifting token ID (17.3: ) -Entering state 18 -Reading a token -Next token is token ID (17.5: ) -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (17.3: ) --> $$ = nterm expr (17.3: ) -Entering state 20 -Next token is token ID (17.5: ) -17.5: syntax error -Error: popping nterm expr (17.3: ) -Error: popping token '(' (17.2: ) -Error: popping token TYPENAME (17.0: ) -Shifting token error (17.0-5: ) -Entering state 3 -Next token is token ID (17.5: ) -Error: discarding token ID (17.5: ) -Reading a token -Next token is token ')' (17.6: ) -Error: discarding token ')' (17.6: ) -Reading a token -Next token is token '=' (17.8: ) -Error: discarding token '=' (17.8: ) -Reading a token -Next token is token ID (17.10: ) -Error: discarding token ID (17.10: ) -Reading a token -Next token is token '+' (17.12: ) -Error: discarding token '+' (17.12: ) -Reading a token -Next token is token ID (17.14: ) -Error: discarding token ID (17.14: ) -Reading a token -Next token is token ';' (17.15: ) -Entering state 3 -Next token is token ';' (17.15: ) -Shifting token ';' (17.15: ) -Entering state 10 -Reducing stack 0 by rule 5 (line 86): - $1 = token error (17.0-14: ) - $2 = token ';' (17.15: ) --> $$ = nterm stmt (17.0-15: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-15.13: ) - $2 = nterm stmt (17.0-15: ) --> $$ = nterm prog (1.1-17.15: ) -Entering state 1 -Reading a token -Next token is token ID (19.0: ) -Shifting token ID (19.0: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (19.0: ) --> $$ = nterm expr (19.0: ) -Entering state 8 -Reading a token -Next token is token '+' (19.2: ) -Shifting token '+' (19.2: ) -Entering state 15 -Reading a token -Next token is token ID (19.4: ) -Shifting token ID (19.4: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (19.4: ) --> $$ = nterm expr (19.4: ) -Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (19.0: ) - $2 = token '+' (19.2: ) - $3 = nterm expr (19.4: ) --> $$ = nterm expr (19.0-4: ) -Entering state 8 -Reading a token -Next token is token ';' (19.5: ) -Shifting token ';' (19.5: ) -Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (19.0-4: ) - $2 = token ';' (19.5: ) --> $$ = nterm stmt (19.0-5: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-17.15: ) - $2 = nterm stmt (19.0-5: ) --> $$ = nterm prog (1.1-19.5: ) -Entering state 1 -Reading a token -Next token is token '@' (21.0: ) -Shifting token '@' (21.0: ) -Entering state 6 -Reducing stack 0 by rule 6 (line 87): - $1 = token '@' (21.0: ) -Cleanup: popping nterm prog (1.1-19.5: ) -708. 
cxx-type.at:426: ok -stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xffb55f8f->Object::Object { } -0xffb56038->Object::Object { 0xffb55f8f } -0xffb55f8f->Object::~Object { 0xffb55f8f, 0xffb56038 } -Next token is token 'a' (0xffb56038 'a') -0xffb55f88->Object::Object { 0xffb56038 } -0xffb56038->Object::~Object { 0xffb55f88, 0xffb56038 } -Shifting token 'a' (0xffb55f88 'a') -0x56af03c4->Object::Object { 0xffb55f88 } -0xffb55f88->Object::~Object { 0x56af03c4, 0xffb55f88 } +0xffdecd3f->Object::Object { } +0xffdecde8->Object::Object { 0xffdecd3f } +0xffdecd3f->Object::~Object { 0xffdecd3f, 0xffdecde8 } +Next token is token 'a' (0xffdecde8 'a') +0xffdecd38->Object::Object { 0xffdecde8 } +0xffdecde8->Object::~Object { 0xffdecd38, 0xffdecde8 } +Shifting token 'a' (0xffdecd38 'a') +0x57fce3c4->Object::Object { 0xffdecd38 } +0xffdecd38->Object::~Object { 0x57fce3c4, 0xffdecd38 } Entering state 2 Stack now 0 2 -0xffb56048->Object::Object { 0x56af03c4 } +0xffdecdf8->Object::Object { 0x57fce3c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56af03c4 'a') --> $$ = nterm item (0xffb56048 'a') -0x56af03c4->Object::~Object { 0x56af03c4, 0xffb56048 } -0x56af03c4->Object::Object { 0xffb56048 } -0xffb56048->Object::~Object { 0x56af03c4, 0xffb56048 } + $1 = token 'a' (0x57fce3c4 'a') +-> $$ = nterm item (0xffdecdf8 'a') +0x57fce3c4->Object::~Object { 0x57fce3c4, 0xffdecdf8 } +0x57fce3c4->Object::Object { 0xffdecdf8 } +0xffdecdf8->Object::~Object { 0x57fce3c4, 0xffdecdf8 } Entering state 11 Stack now 0 11 Reading a token -0xffb55f8f->Object::Object { 0x56af03c4 } -0xffb56038->Object::Object { 0x56af03c4, 0xffb55f8f } -0xffb55f8f->Object::~Object { 0x56af03c4, 0xffb55f8f, 0xffb56038 } -Next token is token 'a' (0xffb56038 'a') -0xffb55f88->Object::Object { 0x56af03c4, 0xffb56038 } -0xffb56038->Object::~Object { 0x56af03c4, 0xffb55f88, 0xffb56038 } -Shifting token 'a' (0xffb55f88 'a') -0x56af03d4->Object::Object { 0x56af03c4, 0xffb55f88 } -0xffb55f88->Object::~Object { 0x56af03c4, 0x56af03d4, 0xffb55f88 } +0xffdecd3f->Object::Object { 0x57fce3c4 } +0xffdecde8->Object::Object { 0x57fce3c4, 0xffdecd3f } +0xffdecd3f->Object::~Object { 0x57fce3c4, 0xffdecd3f, 0xffdecde8 } +Next token is token 'a' (0xffdecde8 'a') +0xffdecd38->Object::Object { 0x57fce3c4, 0xffdecde8 } +0xffdecde8->Object::~Object { 0x57fce3c4, 0xffdecd38, 0xffdecde8 } +Shifting token 'a' (0xffdecd38 'a') +0x57fce3d4->Object::Object { 0x57fce3c4, 0xffdecd38 } +0xffdecd38->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0xffdecd38 } Entering state 2 Stack now 0 11 2 -0xffb56048->Object::Object { 0x56af03c4, 0x56af03d4 } +0xffdecdf8->Object::Object { 0x57fce3c4, 0x57fce3d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56af03d4 'a') --> $$ = nterm item (0xffb56048 'a') -0x56af03d4->Object::~Object { 0x56af03c4, 0x56af03d4, 0xffb56048 } -0x56af03d4->Object::Object { 0x56af03c4, 0xffb56048 } -0xffb56048->Object::~Object { 0x56af03c4, 0x56af03d4, 0xffb56048 } + $1 = token 'a' (0x57fce3d4 'a') +-> $$ = nterm item (0xffdecdf8 'a') +0x57fce3d4->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0xffdecdf8 } +0x57fce3d4->Object::Object { 0x57fce3c4, 0xffdecdf8 } +0xffdecdf8->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0xffdecdf8 } Entering state 11 Stack now 0 11 11 Reading a token -0xffb55f8f->Object::Object { 0x56af03c4, 0x56af03d4 } -0xffb56038->Object::Object { 0x56af03c4, 0x56af03d4, 0xffb55f8f } -0xffb55f8f->Object::~Object { 0x56af03c4, 0x56af03d4, 0xffb55f8f, 0xffb56038 } -Next token is token 'a' (0xffb56038 'a') 
-0xffb55f88->Object::Object { 0x56af03c4, 0x56af03d4, 0xffb56038 } -0xffb56038->Object::~Object { 0x56af03c4, 0x56af03d4, 0xffb55f88, 0xffb56038 } -Shifting token 'a' (0xffb55f88 'a') -0x56af03e4->Object::Object { 0x56af03c4, 0x56af03d4, 0xffb55f88 } -0xffb55f88->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0xffb55f88 } +0xffdecd3f->Object::Object { 0x57fce3c4, 0x57fce3d4 } +0xffdecde8->Object::Object { 0x57fce3c4, 0x57fce3d4, 0xffdecd3f } +0xffdecd3f->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0xffdecd3f, 0xffdecde8 } +Next token is token 'a' (0xffdecde8 'a') +0xffdecd38->Object::Object { 0x57fce3c4, 0x57fce3d4, 0xffdecde8 } +0xffdecde8->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0xffdecd38, 0xffdecde8 } +Shifting token 'a' (0xffdecd38 'a') +0x57fce3e4->Object::Object { 0x57fce3c4, 0x57fce3d4, 0xffdecd38 } +0xffdecd38->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecd38 } Entering state 2 Stack now 0 11 11 2 -0xffb56048->Object::Object { 0x56af03c4, 0x56af03d4, 0x56af03e4 } +0xffdecdf8->Object::Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56af03e4 'a') --> $$ = nterm item (0xffb56048 'a') -0x56af03e4->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0xffb56048 } -0x56af03e4->Object::Object { 0x56af03c4, 0x56af03d4, 0xffb56048 } -0xffb56048->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0xffb56048 } + $1 = token 'a' (0x57fce3e4 'a') +-> $$ = nterm item (0xffdecdf8 'a') +0x57fce3e4->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecdf8 } +0x57fce3e4->Object::Object { 0x57fce3c4, 0x57fce3d4, 0xffdecdf8 } +0xffdecdf8->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecdf8 } Entering state 11 Stack now 0 11 11 11 Reading a token -0xffb55f8f->Object::Object { 0x56af03c4, 0x56af03d4, 0x56af03e4 } -0xffb56038->Object::Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0xffb55f8f } -0xffb55f8f->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0xffb55f8f, 0xffb56038 } -Next token is token 'a' (0xffb56038 'a') -0xffb55f88->Object::Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0xffb56038 } -0xffb56038->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0xffb55f88, 0xffb56038 } -Shifting token 'a' (0xffb55f88 'a') -0x56af03f4->Object::Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0xffb55f88 } -0xffb55f88->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0x56af03f4, 0xffb55f88 } +0xffdecd3f->Object::Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4 } +0xffdecde8->Object::Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecd3f } +0xffdecd3f->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecd3f, 0xffdecde8 } +Next token is token 'a' (0xffdecde8 'a') +0xffdecd38->Object::Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecde8 } +0xffdecde8->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecd38, 0xffdecde8 } +Shifting token 'a' (0xffdecd38 'a') +0x57fce3f4->Object::Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecd38 } +0xffdecd38->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0x57fce3f4, 0xffdecd38 } Entering state 2 Stack now 0 11 11 11 2 -0xffb56048->Object::Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0x56af03f4 } +0xffdecdf8->Object::Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0x57fce3f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x56af03f4 'a') --> $$ = nterm item (0xffb56048 'a') -0x56af03f4->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0x56af03f4, 0xffb56048 } -0x56af03f4->Object::Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 
0xffb56048 } -0xffb56048->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0x56af03f4, 0xffb56048 } + $1 = token 'a' (0x57fce3f4 'a') +-> $$ = nterm item (0xffdecdf8 'a') +0x57fce3f4->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0x57fce3f4, 0xffdecdf8 } +0x57fce3f4->Object::Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecdf8 } +0xffdecdf8->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0x57fce3f4, 0xffdecdf8 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0xffb55f8f->Object::Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0x56af03f4 } -0xffb56038->Object::Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0x56af03f4, 0xffb55f8f } -0xffb55f8f->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0x56af03f4, 0xffb55f8f, 0xffb56038 } -Next token is token 'p' (0xffb56038 'p'Exception caught: cleaning lookahead and stack -0x56af03f4->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0x56af03f4, 0xffb56038 } -0x56af03e4->Object::~Object { 0x56af03c4, 0x56af03d4, 0x56af03e4, 0xffb56038 } -0x56af03d4->Object::~Object { 0x56af03c4, 0x56af03d4, 0xffb56038 } -0x56af03c4->Object::~Object { 0x56af03c4, 0xffb56038 } -0xffb56038->Object::~Object { 0xffb56038 } +0xffdecd3f->Object::Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0x57fce3f4 } +0xffdecde8->Object::Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0x57fce3f4, 0xffdecd3f } +0xffdecd3f->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0x57fce3f4, 0xffdecd3f, 0xffdecde8 } +Next token is token 'p' (0xffdecde8 'p'Exception caught: cleaning lookahead and stack +0x57fce3f4->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0x57fce3f4, 0xffdecde8 } +0x57fce3e4->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0x57fce3e4, 0xffdecde8 } +0x57fce3d4->Object::~Object { 0x57fce3c4, 0x57fce3d4, 0xffdecde8 } +0x57fce3c4->Object::~Object { 0x57fce3c4, 0xffdecde8 } +0xffdecde8->Object::~Object { 0xffdecde8 } exception caught: printer end { } ./c++.at:1362: grep '^exception caught: printer$' stderr - +stdout: +exception caught: printer +./c++.at:1362: $PREPARSER ./input aaaae +stderr: +exception caught: syntax error +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1362: $PREPARSER ./input aaaaT +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:1362: $PREPARSER ./input aaaaR +./c++.at:857: $PREPARSER ./input +stderr: +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./cxx-type.at:435: $PREPARSER ./types test-input +stderr: +syntax error +./cxx-type.at:435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./cxx-type.at:435: $PREPARSER ./types -p test-input stderr: -./c++.at:1363: $PREPARSER ./input i Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 64): @@ -263693,11 +260937,6 @@ Cleanup: popping nterm prog () ./cxx-type.at:435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -stdout: 
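The paired '-'/'+' hunks in this region differ only in the hexadecimal addresses that the c++.at test's --debug output embeds in each Object::Object / Object::~Object line (0x56af03c4 and 0xffb56038 in the first build versus 0x57fce3c4 and 0xffdecde8 in the second): the trace prints the addresses of automatic objects, and those naturally change from one process invocation to the next (stack placement, ASLR). A minimal sketch of the effect, plain C++ and unrelated to the generated parser:

#include <iostream>

// Logging object addresses the way the test's Object class does makes
// the output vary between runs, because automatic objects live at
// whatever stack address the process happens to get.
struct Object {
    char val;
    explicit Object(char v) : val(v) {
        std::cout << static_cast<const void*>(this)
                  << "->Object::Object('" << val << "')\n";
    }
    ~Object() {
        std::cout << static_cast<const void*>(this)
                  << "->Object::~Object('" << val << "')\n";
    }
};

int main() {
    Object a('a');   // the printed pointer differs from run to run
    Object b('b');
    return 0;        // destructors log again, mirroring the trace above
}

Two invocations of this program typically print different pointers, which is why these trace hunks differ between b1 and b2 even though the generated parsers behave identically.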
-stderr: -stderr: -exception caught: printer Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 64): @@ -264306,131 +261545,15 @@ Reducing stack 0 by rule 6 (line 77): $1 = token '@' () Cleanup: popping nterm prog () -./c++.at:1362: $PREPARSER ./input aaaae -./cxx-type.at:441: $PREPARSER ./types test-input -exception caught: initial-action -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -709. cxx-type.at:432: ok stderr: +709. cxx-type.at:432: ok +stdout: +./cxx-type.at:429: $PREPARSER ./types test-input stderr: 17.5: syntax error -exception caught: syntax error -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaap -./cxx-type.at:441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1362: $PREPARSER ./input aaaaE -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./cxx-type.at:429: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1363: $PREPARSER ./input --debug aaaap -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./cxx-type.at:441: $PREPARSER ./types -p test-input -stderr: -./c++.at:1362: $PREPARSER ./input aaaaT -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0xff867f3f->Object::Object { } -0xff867fe8->Object::Object { 0xff867f3f } -0xff867f3f->Object::~Object { 0xff867f3f, 0xff867fe8 } -Next token is token 'a' (0xff867fe8 'a') -0xff867f38->Object::Object { 0xff867fe8 } -0xff867fe8->Object::~Object { 0xff867f38, 0xff867fe8 } -Shifting token 'a' (0xff867f38 'a') -0x567143c4->Object::Object { 0xff867f38 } -0xff867f38->Object::~Object { 0x567143c4, 0xff867f38 } -Entering state 1 -Stack now 0 1 -0xff867ff8->Object::Object { 0x567143c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x567143c4 'a') --> $$ = nterm item (0xff867ff8 'a') -0x567143c4->Object::~Object { 0x567143c4, 0xff867ff8 } -0x567143c4->Object::Object { 0xff867ff8 } -0xff867ff8->Object::~Object { 0x567143c4, 0xff867ff8 } -Entering state 10 -Stack now 0 10 -Reading a token -0xff867f3f->Object::Object { 0x567143c4 } -0xff867fe8->Object::Object { 0x567143c4, 0xff867f3f } -0xff867f3f->Object::~Object { 0x567143c4, 0xff867f3f, 0xff867fe8 } -Next token is token 'a' (0xff867fe8 'a') -0xff867f38->Object::Object { 0x567143c4, 0xff867fe8 } -0xff867fe8->Object::~Object { 0x567143c4, 0xff867f38, 0xff867fe8 } -Shifting token 'a' (0xff867f38 'a') -0x567143d4->Object::Object { 0x567143c4, 0xff867f38 } -0xff867f38->Object::~Object { 0x567143c4, 0x567143d4, 0xff867f38 } -Entering state 1 -Stack now 0 10 1 -0xff867ff8->Object::Object { 0x567143c4, 0x567143d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x567143d4 'a') --> $$ = nterm item (0xff867ff8 'a') -0x567143d4->Object::~Object { 0x567143c4, 0x567143d4, 0xff867ff8 } -0x567143d4->Object::Object { 0x567143c4, 0xff867ff8 } -0xff867ff8->Object::~Object { 0x567143c4, 0x567143d4, 0xff867ff8 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0xff867f3f->Object::Object { 0x567143c4, 0x567143d4 } -0xff867fe8->Object::Object { 0x567143c4, 0x567143d4, 0xff867f3f } -0xff867f3f->Object::~Object { 0x567143c4, 0x567143d4, 0xff867f3f, 0xff867fe8 } -Next token is token 'a' (0xff867fe8 'a') -0xff867f38->Object::Object { 0x567143c4, 0x567143d4, 0xff867fe8 } -0xff867fe8->Object::~Object { 0x567143c4, 0x567143d4, 0xff867f38, 0xff867fe8 } 
-Shifting token 'a' (0xff867f38 'a') -0x567143e4->Object::Object { 0x567143c4, 0x567143d4, 0xff867f38 } -0xff867f38->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867f38 } -Entering state 1 -Stack now 0 10 10 1 -0xff867ff8->Object::Object { 0x567143c4, 0x567143d4, 0x567143e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x567143e4 'a') --> $$ = nterm item (0xff867ff8 'a') -0x567143e4->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867ff8 } -0x567143e4->Object::Object { 0x567143c4, 0x567143d4, 0xff867ff8 } -0xff867ff8->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867ff8 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0xff867f3f->Object::Object { 0x567143c4, 0x567143d4, 0x567143e4 } -0xff867fe8->Object::Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867f3f } -0xff867f3f->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867f3f, 0xff867fe8 } -Next token is token 'a' (0xff867fe8 'a') -0xff867f38->Object::Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867fe8 } -0xff867fe8->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867f38, 0xff867fe8 } -Shifting token 'a' (0xff867f38 'a') -0x567143f4->Object::Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867f38 } -0xff867f38->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0x567143f4, 0xff867f38 } -Entering state 1 -Stack now 0 10 10 10 1 -0xff867ff8->Object::Object { 0x567143c4, 0x567143d4, 0x567143e4, 0x567143f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x567143f4 'a') --> $$ = nterm item (0xff867ff8 'a') -0x567143f4->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0x567143f4, 0xff867ff8 } -0x567143f4->Object::Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867ff8 } -0xff867ff8->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0x567143f4, 0xff867ff8 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0xff867f3f->Object::Object { 0x567143c4, 0x567143d4, 0x567143e4, 0x567143f4 } -0xff867fe8->Object::Object { 0x567143c4, 0x567143d4, 0x567143e4, 0x567143f4, 0xff867f3f } -0xff867f3f->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0x567143f4, 0xff867f3f, 0xff867fe8 } -Next token is token 'p' (0xff867fe8 'p'Exception caught: cleaning lookahead and stack -0x567143f4->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0x567143f4, 0xff867fe8 } -0x567143e4->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867fe8 } -0x567143d4->Object::~Object { 0x567143c4, 0x567143d4, 0xff867fe8 } -0x567143c4->Object::~Object { 0x567143c4, 0xff867fe8 } -0xff867fe8->Object::~Object { 0xff867fe8 } -exception caught: printer -end { } -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./cxx-type.at:429: $PREPARSER ./types -p test-input stderr: Starting parse Entering state 0 @@ -264750,19 +261873,6 @@ Reducing stack -1 by rule 4 (line 85): $1 = nterm decl (13.0-5: ) -> $$ = nterm stmt (13.0-5: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (13.3: ) --> $$ = nterm expr (13.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (13.0: ) - $2 = token '(' (13.2: ) - $3 = nterm expr (13.3: ) - $4 = token ')' (13.4: ) --> $$ = nterm expr (13.0-4: ) -Reducing stack -1 by rule 3 (line 84): - $1 = nterm expr (13.0-4: ) - $2 = token ';' (13.5: ) --> $$ = nterm stmt (13.0-5: ) Reducing stack -1 by rule 2 (line 72): $1 = nterm prog (1.1-11.9: ) $2 = nterm stmt (13.0-5: ) @@ -264904,35 +262014,6 @@ Reducing 
stack -1 by rule 4 (line 85): $1 = nterm decl (15.0-13: ) -> $$ = nterm stmt (15.0-13: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.3: ) --> $$ = nterm expr (15.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (15.0: ) - $2 = token '(' (15.2: ) - $3 = nterm expr (15.3: ) - $4 = token ')' (15.4: ) --> $$ = nterm expr (15.0-4: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.8: ) --> $$ = nterm expr (15.8: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.12: ) --> $$ = nterm expr (15.12: ) -Reducing stack -1 by rule 9 (line 93): - $1 = nterm expr (15.8: ) - $2 = token '+' (15.10: ) - $3 = nterm expr (15.12: ) --> $$ = nterm expr (15.8-12: ) -Reducing stack -1 by rule 10 (line 94): - $1 = nterm expr (15.0-4: ) - $2 = token '=' (15.6: ) - $3 = nterm expr (15.8-12: ) --> $$ = nterm expr (15.0-12: ) -Reducing stack -1 by rule 3 (line 84): - $1 = nterm expr (15.0-12: ) - $2 = token ';' (15.13: ) --> $$ = nterm stmt (15.0-13: ) Reducing stack -1 by rule 2 (line 72): $1 = nterm prog (1.1-13.5: ) $2 = nterm stmt (15.0-13: ) @@ -265040,117 +262121,10 @@ Reducing stack 0 by rule 6 (line 87): $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) +./cxx-type.at:429: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 -Stack now 0 -Reading a token -0xff867f3f->Object::Object { } -0xff867fe8->Object::Object { 0xff867f3f } -0xff867f3f->Object::~Object { 0xff867f3f, 0xff867fe8 } -Next token is token 'a' (0xff867fe8 'a') -0xff867f38->Object::Object { 0xff867fe8 } -0xff867fe8->Object::~Object { 0xff867f38, 0xff867fe8 } -Shifting token 'a' (0xff867f38 'a') -0x567143c4->Object::Object { 0xff867f38 } -0xff867f38->Object::~Object { 0x567143c4, 0xff867f38 } -Entering state 1 -Stack now 0 1 -0xff867ff8->Object::Object { 0x567143c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x567143c4 'a') --> $$ = nterm item (0xff867ff8 'a') -0x567143c4->Object::~Object { 0x567143c4, 0xff867ff8 } -0x567143c4->Object::Object { 0xff867ff8 } -0xff867ff8->Object::~Object { 0x567143c4, 0xff867ff8 } -Entering state 10 -Stack now 0 10 -Reading a token -0xff867f3f->Object::Object { 0x567143c4 } -0xff867fe8->Object::Object { 0x567143c4, 0xff867f3f } -0xff867f3f->Object::~Object { 0x567143c4, 0xff867f3f, 0xff867fe8 } -Next token is token 'a' (0xff867fe8 'a') -0xff867f38->Object::Object { 0x567143c4, 0xff867fe8 } -0xff867fe8->Object::~Object { 0x567143c4, 0xff867f38, 0xff867fe8 } -Shifting token 'a' (0xff867f38 'a') -0x567143d4->Object::Object { 0x567143c4, 0xff867f38 } -0xff867f38->Object::~Object { 0x567143c4, 0x567143d4, 0xff867f38 } -Entering state 1 -Stack now 0 10 1 -0xff867ff8->Object::Object { 0x567143c4, 0x567143d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x567143d4 'a') --> $$ = nterm item (0xff867ff8 'a') -0x567143d4->Object::~Object { 0x567143c4, 0x567143d4, 0xff867ff8 } -0x567143d4->Object::Object { 0x567143c4, 0xff867ff8 } -0xff867ff8->Object::~Object { 0x567143c4, 0x567143d4, 0xff867ff8 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0xff867f3f->Object::Object { 0x567143c4, 0x567143d4 } -0xff867fe8->Object::Object { 0x567143c4, 0x567143d4, 0xff867f3f } -0xff867f3f->Object::~Object { 0x567143c4, 0x567143d4, 0xff867f3f, 0xff867fe8 } -Next token is token 'a' (0xff867fe8 'a') -0xff867f38->Object::Object { 0x567143c4, 0x567143d4, 0xff867fe8 } -0xff867fe8->Object::~Object { 0x567143c4, 0x567143d4, 0xff867f38, 0xff867fe8 } -Shifting token 'a' (0xff867f38 'a') 
-0x567143e4->Object::Object { 0x567143c4, 0x567143d4, 0xff867f38 } -0xff867f38->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867f38 } -Entering state 1 -Stack now 0 10 10 1 -0xff867ff8->Object::Object { 0x567143c4, 0x567143d4, 0x567143e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x567143e4 'a') --> $$ = nterm item (0xff867ff8 'a') -0x567143e4->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867ff8 } -0x567143e4->Object::Object { 0x567143c4, 0x567143d4, 0xff867ff8 } -0xff867ff8->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867ff8 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0xff867f3f->Object::Object { 0x567143c4, 0x567143d4, 0x567143e4 } -0xff867fe8->Object::Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867f3f } -0xff867f3f->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867f3f, 0xff867fe8 } -Next token is token 'a' (0xff867fe8 'a') -0xff867f38->Object::Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867fe8 } -0xff867fe8->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867f38, 0xff867fe8 } -Shifting token 'a' (0xff867f38 'a') -0x567143f4->Object::Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867f38 } -0xff867f38->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0x567143f4, 0xff867f38 } -Entering state 1 -Stack now 0 10 10 10 1 -0xff867ff8->Object::Object { 0x567143c4, 0x567143d4, 0x567143e4, 0x567143f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x567143f4 'a') --> $$ = nterm item (0xff867ff8 'a') -0x567143f4->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0x567143f4, 0xff867ff8 } -0x567143f4->Object::Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867ff8 } -0xff867ff8->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0x567143f4, 0xff867ff8 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0xff867f3f->Object::Object { 0x567143c4, 0x567143d4, 0x567143e4, 0x567143f4 } -0xff867fe8->Object::Object { 0x567143c4, 0x567143d4, 0x567143e4, 0x567143f4, 0xff867f3f } -0xff867f3f->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0x567143f4, 0xff867f3f, 0xff867fe8 } -Next token is token 'p' (0xff867fe8 'p'Exception caught: cleaning lookahead and stack -0x567143f4->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0x567143f4, 0xff867fe8 } -0x567143e4->Object::~Object { 0x567143c4, 0x567143d4, 0x567143e4, 0xff867fe8 } -0x567143d4->Object::~Object { 0x567143c4, 0x567143d4, 0xff867fe8 } -0x567143c4->Object::~Object { 0x567143c4, 0xff867fe8 } -0xff867fe8->Object::~Object { 0xff867fe8 } -exception caught: printer -end { } -./c++.at:1363: grep '^exception caught: printer$' stderr -./cxx-type.at:441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaR -stderr: -stderr: -stdout: -exception caught: printer -./c++.at:1363: $PREPARSER ./input aaaae -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 Reducing stack 0 by rule 1 (line 71): -> $$ = nterm prog (1.1: ) Entering state 1 @@ -265467,19 +262441,6 @@ Reducing stack -1 by rule 4 (line 85): $1 = nterm decl (13.0-5: ) -> $$ = nterm stmt (13.0-5: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (13.3: ) --> $$ = nterm expr (13.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (13.0: ) - $2 = token '(' (13.2: ) - $3 = nterm expr (13.3: ) - $4 = token ')' (13.4: ) --> $$ = nterm expr (13.0-4: ) -Reducing stack -1 by rule 3 (line 84): - $1 = nterm expr (13.0-4: ) - $2 = token 
';' (13.5: ) --> $$ = nterm stmt (13.0-5: ) Reducing stack -1 by rule 2 (line 72): $1 = nterm prog (1.1-11.9: ) $2 = nterm stmt (13.0-5: ) @@ -265621,35 +262582,6 @@ Reducing stack -1 by rule 4 (line 85): $1 = nterm decl (15.0-13: ) -> $$ = nterm stmt (15.0-13: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.3: ) --> $$ = nterm expr (15.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (15.0: ) - $2 = token '(' (15.2: ) - $3 = nterm expr (15.3: ) - $4 = token ')' (15.4: ) --> $$ = nterm expr (15.0-4: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.8: ) --> $$ = nterm expr (15.8: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.12: ) --> $$ = nterm expr (15.12: ) -Reducing stack -1 by rule 9 (line 93): - $1 = nterm expr (15.8: ) - $2 = token '+' (15.10: ) - $3 = nterm expr (15.12: ) --> $$ = nterm expr (15.8-12: ) -Reducing stack -1 by rule 10 (line 94): - $1 = nterm expr (15.0-4: ) - $2 = token '=' (15.6: ) - $3 = nterm expr (15.8-12: ) --> $$ = nterm expr (15.0-12: ) -Reducing stack -1 by rule 3 (line 84): - $1 = nterm expr (15.0-12: ) - $2 = token ';' (15.13: ) --> $$ = nterm stmt (15.0-13: ) Reducing stack -1 by rule 2 (line 72): $1 = nterm prog (1.1-13.5: ) $2 = nterm stmt (15.0-13: ) @@ -265757,38 +262689,15 @@ Reducing stack 0 by rule 6 (line 87): $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) -710. cxx-type.at:438: ok -stderr: -======== Testing with C++ standard flags: '' -exception caught: syntax error -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +708. cxx-type.at:426: ok -stderr: -./c++.at:1363: $PREPARSER ./input aaaaT -stdout: -./c++.at:859: $PREPARSER ./input -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1363: $PREPARSER ./input aaaaR -./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS 712. cxx-type.at:449: testing GLR: Merge conflicting parses, pure, locations ... ./cxx-type.at:450: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +stderr: +stdout: +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS 713. cxx-type.at:455: testing GLR: Verbose messages, resolve ambiguity, impure, no locations ... ./cxx-type.at:456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -714. glr-regression.at:205: testing Badly Collapsed GLR States: glr.c ... 
-./glr-regression.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.c glr-regr1.y stderr: types.y:87.8-37: warning: unset value: $$ [-Wother] types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] @@ -265799,1276 +262708,1278 @@ types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples ./cxx-type.at:456: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -./glr-regression.at:205: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr1 glr-regr1.c $LIBS stderr: stdout: -./c++.at:1555: $PREPARSER ./test +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -691. c++.at:1517: ok +stdout: +./c++.at:568: $here/modern +stdout: +Modern C++: 202100 +./c++.at:568: $PREPARSER ./list +stderr: +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +668. c++.at:568: ok -715. glr-regression.at:206: testing Badly Collapsed GLR States: glr.cc ... -./glr-regression.at:206: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.cc glr-regr1.y -./glr-regression.at:206: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS +714. glr-regression.at:205: testing Badly Collapsed GLR States: glr.c ... +./glr-regression.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.c glr-regr1.y stderr: stdout: -./cxx-type.at:447: $PREPARSER ./types test-input +./cxx-type.at:441: $PREPARSER ./types test-input stderr: -syntax error -./cxx-type.at:447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./cxx-type.at:447: $PREPARSER ./types -p test-input +17.5: syntax error +./cxx-type.at:441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./cxx-type.at:441: $PREPARSER ./types -p test-input stderr: Starting parse Entering state 0 -Reducing stack 0 by rule 1 (line 64): --> $$ = nterm prog () +Reducing stack 0 by rule 1 (line 71): +-> $$ = nterm prog (1.1: ) Entering state 1 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (3.0: ) +Shifting token ID (3.0: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (3.0: ) +-> $$ = nterm expr (3.0: ) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (3.2: ) +Shifting token '+' (3.2: ) Entering state 15 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (3.4: ) +Shifting token ID (3.4: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (3.4: ) +-> $$ = nterm expr (3.4: ) Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (3.0: ) + $2 = token '+' 
(3.2: ) + $3 = nterm expr (3.4: ) +-> $$ = nterm expr (3.0-4: ) Entering state 8 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (3.5: ) +Shifting token ';' (3.5: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (3.0-4: ) + $2 = token ';' (3.5: ) +-> $$ = nterm stmt (3.0-5: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1: ) + $2 = nterm stmt (3.0-5: ) +-> $$ = nterm prog (1.1-3.5: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (5.0: ) +Shifting token TYPENAME (5.0: ) Entering state 4 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (5.2: ) +Shifting token ID (5.2: ) Entering state 11 -Reducing stack 0 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () +Reducing stack 0 by rule 13 (line 104): + $1 = token ID (5.2: ) +-> $$ = nterm declarator (5.2: ) Entering state 13 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (5.3: ) +Shifting token ';' (5.3: ) Entering state 23 -Reducing stack 0 by rule 11 (line 87): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token ';' () --> $$ = nterm decl () +Reducing stack 0 by rule 11 (line 97): + $1 = token TYPENAME (5.0: ) + $2 = nterm declarator (5.2: ) + $3 = token ';' (5.3: ) +-> $$ = nterm decl (5.0-3: ) Entering state 9 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () +Reducing stack 0 by rule 4 (line 85): + $1 = nterm decl (5.0-3: ) +-> $$ = nterm stmt (5.0-3: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-3.5: ) + $2 = nterm stmt (5.0-3: ) +-> $$ = nterm prog (1.1-5.3: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (7.0: ) +Shifting token TYPENAME (7.0: ) Entering state 4 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (7.2: ) +Shifting token ID (7.2: ) Entering state 11 -Reducing stack 0 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () +Reducing stack 0 by rule 13 (line 104): + $1 = token ID (7.2: ) +-> $$ = nterm declarator (7.2: ) Entering state 13 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (7.4: ) +Shifting token '=' (7.4: ) Entering state 22 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (7.6: ) +Shifting token ID (7.6: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (7.6: ) +-> $$ = nterm expr (7.6: ) Entering state 29 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (7.7: ) +Shifting token ';' (7.7: ) Entering state 30 -Reducing stack 0 by rule 12 (line 89): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token '=' () - $4 = nterm expr () - $5 = token ';' () --> $$ = nterm decl () +Reducing stack 0 by rule 12 (line 99): + $1 = token TYPENAME (7.0: ) + $2 = nterm declarator (7.2: ) + $3 = token 
'=' (7.4: ) + $4 = nterm expr (7.6: ) + $5 = token ';' (7.7: ) +-> $$ = nterm decl (7.0-7: ) Entering state 9 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () +Reducing stack 0 by rule 4 (line 85): + $1 = nterm decl (7.0-7: ) +-> $$ = nterm stmt (7.0-7: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-5.3: ) + $2 = nterm stmt (7.0-7: ) +-> $$ = nterm prog (1.1-7.7: ) Entering state 1 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (9.0: ) +Shifting token ID (9.0: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (9.0: ) +-> $$ = nterm expr (9.0: ) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (9.2: ) +Shifting token '=' (9.2: ) Entering state 14 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (9.4: ) +Shifting token ID (9.4: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (9.4: ) +-> $$ = nterm expr (9.4: ) Entering state 24 Reading a token -Next token is token ';' () -Reducing stack 0 by rule 10 (line 84): - $1 = nterm expr () - $2 = token '=' () - $3 = nterm expr () --> $$ = nterm expr () +Next token is token ';' (9.5: ) +Reducing stack 0 by rule 10 (line 94): + $1 = nterm expr (9.0: ) + $2 = token '=' (9.2: ) + $3 = nterm expr (9.4: ) +-> $$ = nterm expr (9.0-4: ) Entering state 8 -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (9.5: ) +Shifting token ';' (9.5: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (9.0-4: ) + $2 = token ';' (9.5: ) +-> $$ = nterm stmt (9.0-5: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-7.7: ) + $2 = nterm stmt (9.0-5: ) +-> $$ = nterm prog (1.1-9.5: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (11.0: ) +Shifting token TYPENAME (11.0: ) Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (11.2: ) +Shifting token '(' (11.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (11.3: ) +Shifting token ID (11.3: ) Entering state 18 Reading a token -Next token is token ')' () +Next token is token ')' (11.4: ) Stack 0 Entering state 18 -Next token is token ')' () +Next token is token ')' (11.4: ) Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Next token is token ')' (11.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. 
Stack 0 Entering state 20 -Next token is token ')' () +Next token is token ')' (11.4: ) Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () +Next token is token ')' (11.4: ) +On stack 0, shifting token ')' (11.4: ) Stack 0 now in state 27 -On stack 1, shifting token ')' () +On stack 1, shifting token ')' (11.4: ) Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token '+' () +Next token is token '+' (11.6: ) Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token '+' () +Next token is token '+' (11.6: ) Stack 1 dies. Removing dead stacks. -On stack 0, shifting token '+' () +On stack 0, shifting token '+' (11.6: ) Stack 0 now in state 15 -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (11.3: ) +-> $$ = nterm expr (11.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (11.0: ) + $2 = token '(' (11.2: ) + $3 = nterm expr (11.3: ) + $4 = token ')' (11.4: ) +-> $$ = nterm expr (11.0-4: ) Returning to deterministic operation. Entering state 15 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (11.8: ) +Shifting token ID (11.8: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (11.8: ) +-> $$ = nterm expr (11.8: ) Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (11.0-4: ) + $2 = token '+' (11.6: ) + $3 = nterm expr (11.8: ) +-> $$ = nterm expr (11.0-8: ) Entering state 8 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (11.9: ) +Shifting token ';' (11.9: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (11.0-8: ) + $2 = token ';' (11.9: ) +-> $$ = nterm stmt (11.0-9: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-9.5: ) + $2 = nterm stmt (11.0-9: ) +-> $$ = nterm prog (1.1-11.9: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (13.0: ) +Shifting token TYPENAME (13.0: ) Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (13.2: ) +Shifting token '(' (13.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (13.3: ) +Shifting token ID (13.3: ) Entering state 18 Reading a token -Next token is token ')' () +Next token is token ')' (13.4: ) Stack 0 Entering state 18 -Next token is token ')' () +Next token is token ')' (13.4: ) Splitting off stack 1 from 0. 
-Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Next token is token ')' (13.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' () +Next token is token ')' (13.4: ) Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () +Next token is token ')' (13.4: ) +On stack 0, shifting token ')' (13.4: ) Stack 0 now in state 27 -On stack 1, shifting token ')' () +On stack 1, shifting token ')' (13.4: ) Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token ';' () +Next token is token ';' (13.5: ) Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token ';' () -On stack 0, shifting token ';' () +Next token is token ';' (13.5: ) +On stack 0, shifting token ';' (13.5: ) Stack 0 now in state 16 -On stack 1, shifting token ';' () +On stack 1, shifting token ';' (13.5: ) Stack 1 now in state 23 Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. Stack 0 Entering state 1 Reading a token -Next token is token TYPENAME () +Next token is token TYPENAME (15.0: ) Stack 1 Entering state 23 -Reduced stack 1 by rule 11 (line 87); action deferred. Now in state 9. +Reduced stack 1 by rule 11 (line 97); action deferred. Now in state 9. Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. Merging stack 1 into stack 0. Removing dead stacks. 
-On stack 0, shifting token TYPENAME () +On stack 0, shifting token TYPENAME (15.0: ) Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Reducing stack -1 by rule 14 (line 95): - $1 = token '(' () - $2 = nterm declarator () - $3 = token ')' () --> $$ = nterm declarator () -Reducing stack -1 by rule 11 (line 87): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token ';' () --> $$ = nterm decl () -Reducing stack -1 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () -Reducing stack -1 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Reducing stack -1 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack -1 by rule 13 (line 104): + $1 = token ID (13.3: ) +-> $$ = nterm declarator (13.3: ) +Reducing stack -1 by rule 14 (line 105): + $1 = token '(' (13.2: ) + $2 = nterm declarator (13.3: ) + $3 = token ')' (13.4: ) +-> $$ = nterm declarator (13.2-4: ) +Reducing stack -1 by rule 11 (line 97): + $1 = token TYPENAME (13.0: ) + $2 = nterm declarator (13.2-4: ) + $3 = token ';' (13.5: ) +-> $$ = nterm decl (13.0-5: ) +Reducing stack -1 by rule 4 (line 85): + $1 = nterm decl (13.0-5: ) +-> $$ = nterm stmt (13.0-5: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (13.3: ) +-> $$ = nterm expr (13.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (13.0: ) + $2 = token '(' (13.2: ) + $3 = nterm expr (13.3: ) + $4 = token ')' (13.4: ) +-> $$ = nterm expr (13.0-4: ) +Reducing stack -1 by rule 3 (line 84): + $1 = nterm expr (13.0-4: ) + $2 = token ';' (13.5: ) +-> $$ = nterm stmt (13.0-5: ) +Reducing stack -1 by rule 2 (line 72): + $1 = nterm prog (1.1-11.9: ) + $2 = nterm stmt (13.0-5: ) +-> $$ = nterm prog (1.1-13.5: ) Returning to deterministic operation. Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (15.2: ) +Shifting token '(' (15.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (15.3: ) +Shifting token ID (15.3: ) Entering state 18 Reading a token -Next token is token ')' () +Next token is token ')' (15.4: ) Stack 0 Entering state 18 -Next token is token ')' () +Next token is token ')' (15.4: ) Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Next token is token ')' (15.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' () +Next token is token ')' (15.4: ) Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () +Next token is token ')' (15.4: ) +On stack 0, shifting token ')' (15.4: ) Stack 0 now in state 27 -On stack 1, shifting token ')' () +On stack 1, shifting token ')' (15.4: ) Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. 
Stack 0 Entering state 8 Reading a token -Next token is token '=' () +Next token is token '=' (15.6: ) Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token '=' () -On stack 0, shifting token '=' () +Next token is token '=' (15.6: ) +On stack 0, shifting token '=' (15.6: ) Stack 0 now in state 14 -On stack 1, shifting token '=' () +On stack 1, shifting token '=' (15.6: ) Stack 1 now in state 22 Stack 0 Entering state 14 Reading a token -Next token is token ID () +Next token is token ID (15.8: ) Stack 1 Entering state 22 -Next token is token ID () -On stack 0, shifting token ID () +Next token is token ID (15.8: ) +On stack 0, shifting token ID (15.8: ) Stack 0 now in state 5 -On stack 1, shifting token ID () +On stack 1, shifting token ID (15.8: ) Stack 1 now in state 5 Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 24. +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 24. Stack 0 Entering state 24 Reading a token -Next token is token '+' () +Next token is token '+' (15.10: ) Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 29. +Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 29. Stack 1 Entering state 29 -Next token is token '+' () -On stack 0, shifting token '+' () +Next token is token '+' (15.10: ) +On stack 0, shifting token '+' (15.10: ) Stack 0 now in state 15 -On stack 1, shifting token '+' () +On stack 1, shifting token '+' (15.10: ) Stack 1 now in state 15 Stack 0 Entering state 15 Reading a token -Next token is token ID () +Next token is token ID (15.12: ) Stack 1 Entering state 15 -Next token is token ID () -On stack 0, shifting token ID () +Next token is token ID (15.12: ) +On stack 0, shifting token ID (15.12: ) Stack 0 now in state 5 -On stack 1, shifting token ID () +On stack 1, shifting token ID (15.12: ) Stack 1 now in state 5 Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 25. +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 25. Stack 0 Entering state 25 -Reduced stack 0 by rule 9 (line 83); action deferred. Now in state 24. +Reduced stack 0 by rule 9 (line 93); action deferred. Now in state 24. Stack 0 Entering state 24 Reading a token -Next token is token ';' () -Reduced stack 0 by rule 10 (line 84); action deferred. Now in state 8. +Next token is token ';' (15.13: ) +Reduced stack 0 by rule 10 (line 94); action deferred. Now in state 8. Stack 0 Entering state 8 -Next token is token ';' () +Next token is token ';' (15.13: ) Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 25. +Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 25. Stack 1 Entering state 25 -Reduced stack 1 by rule 9 (line 83); action deferred. Now in state 29. +Reduced stack 1 by rule 9 (line 93); action deferred. Now in state 29. Stack 1 Entering state 29 -Next token is token ';' () -On stack 0, shifting token ';' () +Next token is token ';' (15.13: ) +On stack 0, shifting token ';' (15.13: ) Stack 0 now in state 16 -On stack 1, shifting token ';' () +On stack 1, shifting token ';' (15.13: ) Stack 1 now in state 30 Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. 
Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. Stack 0 Entering state 1 Reading a token -Next token is token TYPENAME () +Next token is token TYPENAME (17.0: ) Stack 1 Entering state 30 -Reduced stack 1 by rule 12 (line 89); action deferred. Now in state 9. +Reduced stack 1 by rule 12 (line 99); action deferred. Now in state 9. Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. Merging stack 1 into stack 0. Removing dead stacks. -On stack 0, shifting token TYPENAME () +On stack 0, shifting token TYPENAME (17.0: ) Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Reducing stack -1 by rule 14 (line 95): - $1 = token '(' () - $2 = nterm declarator () - $3 = token ')' () --> $$ = nterm declarator () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 12 (line 89): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token '=' () - $4 = nterm expr () - $5 = token ';' () --> $$ = nterm decl () -Reducing stack -1 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 10 (line 84): - $1 = nterm expr () - $2 = token '=' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Reducing stack -1 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack -1 by rule 13 (line 104): + $1 = token ID (15.3: ) +-> $$ = nterm declarator (15.3: ) +Reducing stack -1 by rule 14 (line 105): + $1 = token '(' (15.2: ) + $2 = nterm declarator (15.3: ) + $3 = token ')' (15.4: ) +-> $$ = nterm declarator (15.2-4: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.8: ) +-> $$ = nterm expr (15.8: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.12: ) +-> $$ = nterm expr (15.12: ) +Reducing stack -1 by rule 9 (line 93): + $1 = nterm expr (15.8: ) + $2 = token '+' (15.10: ) + $3 = nterm expr (15.12: ) +-> $$ = nterm expr (15.8-12: ) +Reducing stack -1 by rule 12 (line 99): + $1 = token TYPENAME (15.0: ) + $2 = nterm declarator (15.2-4: ) + $3 = token '=' (15.6: ) + $4 = nterm expr (15.8-12: ) + $5 = token ';' (15.13: ) +-> $$ = nterm decl (15.0-13: ) +Reducing stack -1 by rule 4 (line 85): + $1 = nterm decl (15.0-13: ) +-> $$ = nterm stmt (15.0-13: ) +Reducing stack -1 by rule 7 (line 90): + $1 
= token ID (15.3: ) +-> $$ = nterm expr (15.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (15.0: ) + $2 = token '(' (15.2: ) + $3 = nterm expr (15.3: ) + $4 = token ')' (15.4: ) +-> $$ = nterm expr (15.0-4: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.8: ) +-> $$ = nterm expr (15.8: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.12: ) +-> $$ = nterm expr (15.12: ) +Reducing stack -1 by rule 9 (line 93): + $1 = nterm expr (15.8: ) + $2 = token '+' (15.10: ) + $3 = nterm expr (15.12: ) +-> $$ = nterm expr (15.8-12: ) +Reducing stack -1 by rule 10 (line 94): + $1 = nterm expr (15.0-4: ) + $2 = token '=' (15.6: ) + $3 = nterm expr (15.8-12: ) +-> $$ = nterm expr (15.0-12: ) +Reducing stack -1 by rule 3 (line 84): + $1 = nterm expr (15.0-12: ) + $2 = token ';' (15.13: ) +-> $$ = nterm stmt (15.0-13: ) +Reducing stack -1 by rule 2 (line 72): + $1 = nterm prog (1.1-13.5: ) + $2 = nterm stmt (15.0-13: ) +-> $$ = nterm prog (1.1-15.13: ) Returning to deterministic operation. Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (17.2: ) +Shifting token '(' (17.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (17.3: ) +Shifting token ID (17.3: ) Entering state 18 Reading a token -Next token is token ID () -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Next token is token ID (17.5: ) +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (17.3: ) +-> $$ = nterm expr (17.3: ) Entering state 20 -Next token is token ID () -syntax error -Error: popping nterm expr () -Error: popping token '(' () -Error: popping token TYPENAME () -Shifting token error () +Next token is token ID (17.5: ) +17.5: syntax error +Error: popping nterm expr (17.3: ) +Error: popping token '(' (17.2: ) +Error: popping token TYPENAME (17.0: ) +Shifting token error (17.0-5: ) Entering state 3 -Next token is token ID () -Error: discarding token ID () +Next token is token ID (17.5: ) +Error: discarding token ID (17.5: ) Reading a token -Next token is token ')' () -Error: discarding token ')' () +Next token is token ')' (17.6: ) +Error: discarding token ')' (17.6: ) Reading a token -Next token is token '=' () -Error: discarding token '=' () +Next token is token '=' (17.8: ) +Error: discarding token '=' (17.8: ) Reading a token -Next token is token ID () -Error: discarding token ID () +Next token is token ID (17.10: ) +Error: discarding token ID (17.10: ) Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '+' (17.12: ) +Error: discarding token '+' (17.12: ) Reading a token -Next token is token ID () -Error: discarding token ID () +Next token is token ID (17.14: ) +Error: discarding token ID (17.14: ) Reading a token -Next token is token ';' () +Next token is token ';' (17.15: ) Entering state 3 -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (17.15: ) +Shifting token ';' (17.15: ) Entering state 10 -Reducing stack 0 by rule 5 (line 76): - $1 = token error () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 5 (line 86): + $1 = token error (17.0-14: ) + $2 = token ';' (17.15: ) +-> $$ = nterm stmt (17.0-15: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-15.13: ) + $2 = nterm stmt (17.0-15: ) +-> $$ = nterm prog 
(1.1-17.15: ) Entering state 1 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (19.0: ) +Shifting token ID (19.0: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (19.0: ) +-> $$ = nterm expr (19.0: ) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (19.2: ) +Shifting token '+' (19.2: ) Entering state 15 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (19.4: ) +Shifting token ID (19.4: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (19.4: ) +-> $$ = nterm expr (19.4: ) Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (19.0: ) + $2 = token '+' (19.2: ) + $3 = nterm expr (19.4: ) +-> $$ = nterm expr (19.0-4: ) Entering state 8 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (19.5: ) +Shifting token ';' (19.5: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (19.0-4: ) + $2 = token ';' (19.5: ) +-> $$ = nterm stmt (19.0-5: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-17.15: ) + $2 = nterm stmt (19.0-5: ) +-> $$ = nterm prog (1.1-19.5: ) Entering state 1 Reading a token -Next token is token '@' () -Shifting token '@' () +Next token is token '@' (21.0: ) +Shifting token '@' (21.0: ) Entering state 6 -Reducing stack 0 by rule 6 (line 77): - $1 = token '@' () -Cleanup: popping nterm prog () -./cxx-type.at:447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Reducing stack 0 by rule 6 (line 87): + $1 = token '@' (21.0: ) +Cleanup: popping nterm prog (1.1-19.5: ) +./cxx-type.at:441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:205: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr1 glr-regr1.c $LIBS stderr: Starting parse Entering state 0 -Reducing stack 0 by rule 1 (line 64): --> $$ = nterm prog () +Reducing stack 0 by rule 1 (line 71): +-> $$ = nterm prog (1.1: ) Entering state 1 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (3.0: ) +Shifting token ID (3.0: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (3.0: ) +-> $$ = nterm expr (3.0: ) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (3.2: ) +Shifting token '+' (3.2: ) Entering state 15 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (3.4: ) +Shifting token ID (3.4: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (3.4: ) +-> $$ = nterm expr (3.4: ) Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () +Reducing stack 0 by rule 9 (line 93): + $1 = 
nterm expr (3.0: ) + $2 = token '+' (3.2: ) + $3 = nterm expr (3.4: ) +-> $$ = nterm expr (3.0-4: ) Entering state 8 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (3.5: ) +Shifting token ';' (3.5: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (3.0-4: ) + $2 = token ';' (3.5: ) +-> $$ = nterm stmt (3.0-5: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1: ) + $2 = nterm stmt (3.0-5: ) +-> $$ = nterm prog (1.1-3.5: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (5.0: ) +Shifting token TYPENAME (5.0: ) Entering state 4 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (5.2: ) +Shifting token ID (5.2: ) Entering state 11 -Reducing stack 0 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () +Reducing stack 0 by rule 13 (line 104): + $1 = token ID (5.2: ) +-> $$ = nterm declarator (5.2: ) Entering state 13 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (5.3: ) +Shifting token ';' (5.3: ) Entering state 23 -Reducing stack 0 by rule 11 (line 87): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token ';' () --> $$ = nterm decl () +Reducing stack 0 by rule 11 (line 97): + $1 = token TYPENAME (5.0: ) + $2 = nterm declarator (5.2: ) + $3 = token ';' (5.3: ) +-> $$ = nterm decl (5.0-3: ) Entering state 9 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () +Reducing stack 0 by rule 4 (line 85): + $1 = nterm decl (5.0-3: ) +-> $$ = nterm stmt (5.0-3: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-3.5: ) + $2 = nterm stmt (5.0-3: ) +-> $$ = nterm prog (1.1-5.3: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (7.0: ) +Shifting token TYPENAME (7.0: ) Entering state 4 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (7.2: ) +Shifting token ID (7.2: ) Entering state 11 -Reducing stack 0 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () +Reducing stack 0 by rule 13 (line 104): + $1 = token ID (7.2: ) +-> $$ = nterm declarator (7.2: ) Entering state 13 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (7.4: ) +Shifting token '=' (7.4: ) Entering state 22 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (7.6: ) +Shifting token ID (7.6: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (7.6: ) +-> $$ = nterm expr (7.6: ) Entering state 29 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (7.7: ) +Shifting token ';' (7.7: ) Entering state 30 -Reducing stack 0 by rule 12 (line 89): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token '=' () - $4 = nterm expr () - $5 = token ';' () --> $$ = nterm decl () +Reducing stack 0 by rule 12 (line 99): + $1 = token TYPENAME (7.0: ) + $2 = 
nterm declarator (7.2: ) + $3 = token '=' (7.4: ) + $4 = nterm expr (7.6: ) + $5 = token ';' (7.7: ) +-> $$ = nterm decl (7.0-7: ) Entering state 9 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () +Reducing stack 0 by rule 4 (line 85): + $1 = nterm decl (7.0-7: ) +-> $$ = nterm stmt (7.0-7: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-5.3: ) + $2 = nterm stmt (7.0-7: ) +-> $$ = nterm prog (1.1-7.7: ) Entering state 1 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (9.0: ) +Shifting token ID (9.0: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (9.0: ) +-> $$ = nterm expr (9.0: ) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (9.2: ) +Shifting token '=' (9.2: ) Entering state 14 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (9.4: ) +Shifting token ID (9.4: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (9.4: ) +-> $$ = nterm expr (9.4: ) Entering state 24 Reading a token -Next token is token ';' () -Reducing stack 0 by rule 10 (line 84): - $1 = nterm expr () - $2 = token '=' () - $3 = nterm expr () --> $$ = nterm expr () +Next token is token ';' (9.5: ) +Reducing stack 0 by rule 10 (line 94): + $1 = nterm expr (9.0: ) + $2 = token '=' (9.2: ) + $3 = nterm expr (9.4: ) +-> $$ = nterm expr (9.0-4: ) Entering state 8 -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (9.5: ) +Shifting token ';' (9.5: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (9.0-4: ) + $2 = token ';' (9.5: ) +-> $$ = nterm stmt (9.0-5: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-7.7: ) + $2 = nterm stmt (9.0-5: ) +-> $$ = nterm prog (1.1-9.5: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (11.0: ) +Shifting token TYPENAME (11.0: ) Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (11.2: ) +Shifting token '(' (11.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (11.3: ) +Shifting token ID (11.3: ) Entering state 18 Reading a token -Next token is token ')' () +Next token is token ')' (11.4: ) Stack 0 Entering state 18 -Next token is token ')' () +Next token is token ')' (11.4: ) Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Next token is token ')' (11.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. 
Stack 0 Entering state 20 -Next token is token ')' () +Next token is token ')' (11.4: ) Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () +Next token is token ')' (11.4: ) +On stack 0, shifting token ')' (11.4: ) Stack 0 now in state 27 -On stack 1, shifting token ')' () +On stack 1, shifting token ')' (11.4: ) Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token '+' () +Next token is token '+' (11.6: ) Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token '+' () +Next token is token '+' (11.6: ) Stack 1 dies. Removing dead stacks. -On stack 0, shifting token '+' () +On stack 0, shifting token '+' (11.6: ) Stack 0 now in state 15 -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (11.3: ) +-> $$ = nterm expr (11.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (11.0: ) + $2 = token '(' (11.2: ) + $3 = nterm expr (11.3: ) + $4 = token ')' (11.4: ) +-> $$ = nterm expr (11.0-4: ) Returning to deterministic operation. Entering state 15 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (11.8: ) +Shifting token ID (11.8: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (11.8: ) +-> $$ = nterm expr (11.8: ) Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (11.0-4: ) + $2 = token '+' (11.6: ) + $3 = nterm expr (11.8: ) +-> $$ = nterm expr (11.0-8: ) Entering state 8 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (11.9: ) +Shifting token ';' (11.9: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (11.0-8: ) + $2 = token ';' (11.9: ) +-> $$ = nterm stmt (11.0-9: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-9.5: ) + $2 = nterm stmt (11.0-9: ) +-> $$ = nterm prog (1.1-11.9: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (13.0: ) +Shifting token TYPENAME (13.0: ) Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (13.2: ) +Shifting token '(' (13.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (13.3: ) +Shifting token ID (13.3: ) Entering state 18 Reading a token -Next token is token ')' () +Next token is token ')' (13.4: ) Stack 0 Entering state 18 -Next token is token ')' () +Next token is token ')' (13.4: ) Splitting off stack 1 from 0. 
-Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Next token is token ')' (13.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' () +Next token is token ')' (13.4: ) Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () +Next token is token ')' (13.4: ) +On stack 0, shifting token ')' (13.4: ) Stack 0 now in state 27 -On stack 1, shifting token ')' () +On stack 1, shifting token ')' (13.4: ) Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token ';' () +Next token is token ';' (13.5: ) Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token ';' () -On stack 0, shifting token ';' () +Next token is token ';' (13.5: ) +On stack 0, shifting token ';' (13.5: ) Stack 0 now in state 16 -On stack 1, shifting token ';' () +On stack 1, shifting token ';' (13.5: ) Stack 1 now in state 23 Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. Stack 0 Entering state 1 Reading a token -Next token is token TYPENAME () +Next token is token TYPENAME (15.0: ) Stack 1 Entering state 23 -Reduced stack 1 by rule 11 (line 87); action deferred. Now in state 9. +Reduced stack 1 by rule 11 (line 97); action deferred. Now in state 9. Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. Merging stack 1 into stack 0. Removing dead stacks. 
-On stack 0, shifting token TYPENAME () +On stack 0, shifting token TYPENAME (15.0: ) Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Reducing stack -1 by rule 14 (line 95): - $1 = token '(' () - $2 = nterm declarator () - $3 = token ')' () --> $$ = nterm declarator () -Reducing stack -1 by rule 11 (line 87): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token ';' () --> $$ = nterm decl () -Reducing stack -1 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () -Reducing stack -1 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Reducing stack -1 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack -1 by rule 13 (line 104): + $1 = token ID (13.3: ) +-> $$ = nterm declarator (13.3: ) +Reducing stack -1 by rule 14 (line 105): + $1 = token '(' (13.2: ) + $2 = nterm declarator (13.3: ) + $3 = token ')' (13.4: ) +-> $$ = nterm declarator (13.2-4: ) +Reducing stack -1 by rule 11 (line 97): + $1 = token TYPENAME (13.0: ) + $2 = nterm declarator (13.2-4: ) + $3 = token ';' (13.5: ) +-> $$ = nterm decl (13.0-5: ) +Reducing stack -1 by rule 4 (line 85): + $1 = nterm decl (13.0-5: ) +-> $$ = nterm stmt (13.0-5: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (13.3: ) +-> $$ = nterm expr (13.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (13.0: ) + $2 = token '(' (13.2: ) + $3 = nterm expr (13.3: ) + $4 = token ')' (13.4: ) +-> $$ = nterm expr (13.0-4: ) +Reducing stack -1 by rule 3 (line 84): + $1 = nterm expr (13.0-4: ) + $2 = token ';' (13.5: ) +-> $$ = nterm stmt (13.0-5: ) +Reducing stack -1 by rule 2 (line 72): + $1 = nterm prog (1.1-11.9: ) + $2 = nterm stmt (13.0-5: ) +-> $$ = nterm prog (1.1-13.5: ) Returning to deterministic operation. Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (15.2: ) +Shifting token '(' (15.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (15.3: ) +Shifting token ID (15.3: ) Entering state 18 Reading a token -Next token is token ')' () +Next token is token ')' (15.4: ) Stack 0 Entering state 18 -Next token is token ')' () +Next token is token ')' (15.4: ) Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Next token is token ')' (15.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' () +Next token is token ')' (15.4: ) Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () +Next token is token ')' (15.4: ) +On stack 0, shifting token ')' (15.4: ) Stack 0 now in state 27 -On stack 1, shifting token ')' () +On stack 1, shifting token ')' (15.4: ) Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. 
Stack 0 Entering state 8 Reading a token -Next token is token '=' () +Next token is token '=' (15.6: ) Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token '=' () -On stack 0, shifting token '=' () +Next token is token '=' (15.6: ) +On stack 0, shifting token '=' (15.6: ) Stack 0 now in state 14 -On stack 1, shifting token '=' () +On stack 1, shifting token '=' (15.6: ) Stack 1 now in state 22 Stack 0 Entering state 14 Reading a token -Next token is token ID () +Next token is token ID (15.8: ) Stack 1 Entering state 22 -Next token is token ID () -On stack 0, shifting token ID () +Next token is token ID (15.8: ) +On stack 0, shifting token ID (15.8: ) Stack 0 now in state 5 -On stack 1, shifting token ID () +On stack 1, shifting token ID (15.8: ) Stack 1 now in state 5 Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 24. +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 24. Stack 0 Entering state 24 Reading a token -Next token is token '+' () +Next token is token '+' (15.10: ) Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 29. +Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 29. Stack 1 Entering state 29 -Next token is token '+' () -On stack 0, shifting token '+' () +Next token is token '+' (15.10: ) +On stack 0, shifting token '+' (15.10: ) Stack 0 now in state 15 -On stack 1, shifting token '+' () +On stack 1, shifting token '+' (15.10: ) Stack 1 now in state 15 Stack 0 Entering state 15 Reading a token -Next token is token ID () +Next token is token ID (15.12: ) Stack 1 Entering state 15 -Next token is token ID () -On stack 0, shifting token ID () +Next token is token ID (15.12: ) +On stack 0, shifting token ID (15.12: ) Stack 0 now in state 5 -On stack 1, shifting token ID () +On stack 1, shifting token ID (15.12: ) Stack 1 now in state 5 Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 25. +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 25. Stack 0 Entering state 25 -Reduced stack 0 by rule 9 (line 83); action deferred. Now in state 24. +Reduced stack 0 by rule 9 (line 93); action deferred. Now in state 24. Stack 0 Entering state 24 Reading a token -Next token is token ';' () -Reduced stack 0 by rule 10 (line 84); action deferred. Now in state 8. +Next token is token ';' (15.13: ) +Reduced stack 0 by rule 10 (line 94); action deferred. Now in state 8. Stack 0 Entering state 8 -Next token is token ';' () +Next token is token ';' (15.13: ) Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 25. +Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 25. Stack 1 Entering state 25 -Reduced stack 1 by rule 9 (line 83); action deferred. Now in state 29. +Reduced stack 1 by rule 9 (line 93); action deferred. Now in state 29. Stack 1 Entering state 29 -Next token is token ';' () -On stack 0, shifting token ';' () +Next token is token ';' (15.13: ) +On stack 0, shifting token ';' (15.13: ) Stack 0 now in state 16 -On stack 1, shifting token ';' () +On stack 1, shifting token ';' (15.13: ) Stack 1 now in state 30 Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. 
Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. Stack 0 Entering state 1 Reading a token -Next token is token TYPENAME () +Next token is token TYPENAME (17.0: ) Stack 1 Entering state 30 -Reduced stack 1 by rule 12 (line 89); action deferred. Now in state 9. +Reduced stack 1 by rule 12 (line 99); action deferred. Now in state 9. Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. Merging stack 1 into stack 0. Removing dead stacks. -On stack 0, shifting token TYPENAME () +On stack 0, shifting token TYPENAME (17.0: ) Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Reducing stack -1 by rule 14 (line 95): - $1 = token '(' () - $2 = nterm declarator () - $3 = token ')' () --> $$ = nterm declarator () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 12 (line 89): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token '=' () - $4 = nterm expr () - $5 = token ';' () --> $$ = nterm decl () -Reducing stack -1 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 10 (line 84): - $1 = nterm expr () - $2 = token '=' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Reducing stack -1 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack -1 by rule 13 (line 104): + $1 = token ID (15.3: ) +-> $$ = nterm declarator (15.3: ) +Reducing stack -1 by rule 14 (line 105): + $1 = token '(' (15.2: ) + $2 = nterm declarator (15.3: ) + $3 = token ')' (15.4: ) +-> $$ = nterm declarator (15.2-4: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.8: ) +-> $$ = nterm expr (15.8: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.12: ) +-> $$ = nterm expr (15.12: ) +Reducing stack -1 by rule 9 (line 93): + $1 = nterm expr (15.8: ) + $2 = token '+' (15.10: ) + $3 = nterm expr (15.12: ) +-> $$ = nterm expr (15.8-12: ) +Reducing stack -1 by rule 12 (line 99): + $1 = token TYPENAME (15.0: ) + $2 = nterm declarator (15.2-4: ) + $3 = token '=' (15.6: ) + $4 = nterm expr (15.8-12: ) + $5 = token ';' (15.13: ) +-> $$ = nterm decl (15.0-13: ) +Reducing stack -1 by rule 4 (line 85): + $1 = nterm decl (15.0-13: ) +-> $$ = nterm stmt (15.0-13: ) +Reducing stack -1 by rule 7 (line 90): + $1 
= token ID (15.3: ) +-> $$ = nterm expr (15.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (15.0: ) + $2 = token '(' (15.2: ) + $3 = nterm expr (15.3: ) + $4 = token ')' (15.4: ) +-> $$ = nterm expr (15.0-4: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.8: ) +-> $$ = nterm expr (15.8: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.12: ) +-> $$ = nterm expr (15.12: ) +Reducing stack -1 by rule 9 (line 93): + $1 = nterm expr (15.8: ) + $2 = token '+' (15.10: ) + $3 = nterm expr (15.12: ) +-> $$ = nterm expr (15.8-12: ) +Reducing stack -1 by rule 10 (line 94): + $1 = nterm expr (15.0-4: ) + $2 = token '=' (15.6: ) + $3 = nterm expr (15.8-12: ) +-> $$ = nterm expr (15.0-12: ) +Reducing stack -1 by rule 3 (line 84): + $1 = nterm expr (15.0-12: ) + $2 = token ';' (15.13: ) +-> $$ = nterm stmt (15.0-13: ) +Reducing stack -1 by rule 2 (line 72): + $1 = nterm prog (1.1-13.5: ) + $2 = nterm stmt (15.0-13: ) +-> $$ = nterm prog (1.1-15.13: ) Returning to deterministic operation. Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (17.2: ) +Shifting token '(' (17.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (17.3: ) +Shifting token ID (17.3: ) Entering state 18 Reading a token -Next token is token ID () -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Next token is token ID (17.5: ) +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (17.3: ) +-> $$ = nterm expr (17.3: ) Entering state 20 -Next token is token ID () -syntax error -Error: popping nterm expr () -Error: popping token '(' () -Error: popping token TYPENAME () -Shifting token error () +Next token is token ID (17.5: ) +17.5: syntax error +Error: popping nterm expr (17.3: ) +Error: popping token '(' (17.2: ) +Error: popping token TYPENAME (17.0: ) +Shifting token error (17.0-5: ) Entering state 3 -Next token is token ID () -Error: discarding token ID () +Next token is token ID (17.5: ) +Error: discarding token ID (17.5: ) Reading a token -Next token is token ')' () -Error: discarding token ')' () +Next token is token ')' (17.6: ) +Error: discarding token ')' (17.6: ) Reading a token -Next token is token '=' () -Error: discarding token '=' () +Next token is token '=' (17.8: ) +Error: discarding token '=' (17.8: ) Reading a token -Next token is token ID () -Error: discarding token ID () +Next token is token ID (17.10: ) +Error: discarding token ID (17.10: ) Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '+' (17.12: ) +Error: discarding token '+' (17.12: ) Reading a token -Next token is token ID () -Error: discarding token ID () +Next token is token ID (17.14: ) +Error: discarding token ID (17.14: ) Reading a token -Next token is token ';' () +Next token is token ';' (17.15: ) Entering state 3 -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (17.15: ) +Shifting token ';' (17.15: ) Entering state 10 -Reducing stack 0 by rule 5 (line 76): - $1 = token error () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 5 (line 86): + $1 = token error (17.0-14: ) + $2 = token ';' (17.15: ) +-> $$ = nterm stmt (17.0-15: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-15.13: ) + $2 = nterm stmt (17.0-15: ) +-> $$ = nterm prog 
(1.1-17.15: ) Entering state 1 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (19.0: ) +Shifting token ID (19.0: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (19.0: ) +-> $$ = nterm expr (19.0: ) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (19.2: ) +Shifting token '+' (19.2: ) Entering state 15 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (19.4: ) +Shifting token ID (19.4: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (19.4: ) +-> $$ = nterm expr (19.4: ) Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (19.0: ) + $2 = token '+' (19.2: ) + $3 = nterm expr (19.4: ) +-> $$ = nterm expr (19.0-4: ) Entering state 8 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (19.5: ) +Shifting token ';' (19.5: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (19.0-4: ) + $2 = token ';' (19.5: ) +-> $$ = nterm stmt (19.0-5: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-17.15: ) + $2 = nterm stmt (19.0-5: ) +-> $$ = nterm prog (1.1-19.5: ) Entering state 1 Reading a token -Next token is token '@' () -Shifting token '@' () +Next token is token '@' (21.0: ) +Shifting token '@' (21.0: ) Entering state 6 -Reducing stack 0 by rule 6 (line 77): - $1 = token '@' () -Cleanup: popping nterm prog () -711. cxx-type.at:444: ok - -716. glr-regression.at:207: testing Badly Collapsed GLR States: glr2.cc ... -./glr-regression.at:207: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.cc glr-regr1.y -./glr-regression.at:207: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS -stderr: -stdout: -./c++.at:859: $PREPARSER ./input -stderr: -./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: +Reducing stack 0 by rule 6 (line 87): + $1 = token '@' (21.0: ) +Cleanup: popping nterm prog (1.1-19.5: ) +710. cxx-type.at:438: stderr: + ok stdout: -./glr-regression.at:205: $PREPARSER ./glr-regr1 BPBPB +./cxx-type.at:447: $PREPARSER ./types test-input stderr: -./glr-regression.at:205: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -714. glr-regression.at:205: ok +syntax error +./cxx-type.at:447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -717. glr-regression.at:354: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr.c ... 
-./glr-regression.at:354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.c glr-regr2a.y -./glr-regression.at:354: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.c $LIBS -stderr: -stdout: -./cxx-type.at:458: $PREPARSER ./types test-input -stderr: -syntax error, unexpected ID, expecting '=' or '+' or ')' -stderr: -./cxx-type.at:458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./cxx-type.at:458: $PREPARSER ./types -p test-input -./cxx-type.at:452: $PREPARSER ./types test-input -stderr: +./cxx-type.at:447: $PREPARSER ./types -p test-input stderr: Starting parse Entering state 0 @@ -267592,7 +264503,7 @@ -> $$ = nterm expr () Entering state 20 Next token is token ID () -syntax error, unexpected ID, expecting '=' or '+' or ')' +syntax error Error: popping nterm expr () Error: popping token '(' () Error: popping token TYPENAME () @@ -267678,9 +264589,7 @@ Reducing stack 0 by rule 6 (line 77): $1 = token '@' () Cleanup: popping nterm prog () -17.5: syntax error -./cxx-type.at:458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./cxx-type.at:452: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./cxx-type.at:447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -268204,7 +265113,7 @@ -> $$ = nterm expr () Entering state 20 Next token is token ID () -syntax error, unexpected ID, expecting '=' or '+' or ')' +syntax error Error: popping nterm expr () Error: popping token '(' () Error: popping token TYPENAME () @@ -268290,9 +265199,126 @@ Reducing stack 0 by rule 6 (line 77): $1 = token '@' () Cleanup: popping nterm prog () -713. cxx-type.at:455: ok -./cxx-type.at:452: $PREPARSER ./types -p test-input +711. cxx-type.at:444: ok + +stderr: +stdout: +./c++.at:851: $PREPARSER ./input +stderr: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +715. glr-regression.at:206: testing Badly Collapsed GLR States: glr.cc ... +./glr-regression.at:206: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.cc glr-regr1.y +716. glr-regression.at:207: testing Badly Collapsed GLR States: glr2.cc ... 
+./glr-regression.at:207: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.cc glr-regr1.y +./glr-regression.at:206: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS +./glr-regression.at:207: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS +stderr: +stdout: +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:567: $here/modern +stdout: +Modern C++: 202100 +./c++.at:567: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +======== Testing with C++ standard flags: '' +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:857: $PREPARSER ./input +stderr: +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +667. c++.at:567: ok + +stderr: +stdout: +./existing.at:808: $PREPARSER ./input +stderr: +./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +621. existing.at:808: ok +717. glr-regression.at:354: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr.c ... +./glr-regression.at:354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.c glr-regr2a.y +stderr: +stdout: +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +718. glr-regression.at:355: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr.cc ... +./glr-regression.at:355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.cc glr-regr2a.y +stderr: +stdout: +./glr-regression.at:354: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.c $LIBS +./c++.at:570: $here/modern +stdout: +Modern C++: 202100 +./c++.at:570: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +stderr: +stdout: +./cxx-type.at:452: $PREPARSER ./types test-input +./c++.at:1555: $PREPARSER ./test +stderr: +17.5: syntax error +stderr: +./cxx-type.at:452: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +670. 
c++.at:570: ok +======== Testing with C++ standard flags: '' +./cxx-type.at:452: $PREPARSER ./types -p test-input +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./glr-regression.at:355: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.cc $LIBS stderr: Starting parse Entering state 0 @@ -268903,6 +265929,7 @@ $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) ./cxx-type.at:452: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + stderr: Starting parse Entering state 0 @@ -269513,57 +266540,1525 @@ $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) 712. cxx-type.at:449: ok +stderr: -718. glr-regression.at:355: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr.cc ... -./glr-regression.at:355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.cc glr-regr2a.y +stdout: +./cxx-type.at:458: $PREPARSER ./types test-input +stderr: +syntax error, unexpected ID, expecting '=' or '+' or ')' +./cxx-type.at:458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./cxx-type.at:458: $PREPARSER ./types -p test-input +stderr: +Starting parse +Entering state 0 +Reducing stack 0 by rule 1 (line 64): +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 15 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 25 +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 11 +Reducing stack 0 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Entering state 13 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 23 +Reducing stack 0 by rule 11 (line 87): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token ';' () +-> $$ = nterm decl () +Entering state 9 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 11 +Reducing stack 0 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Entering state 13 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 22 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm 
expr () +Entering state 29 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 30 +Reducing stack 0 by rule 12 (line 89): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token '=' () + $4 = nterm expr () + $5 = token ';' () +-> $$ = nterm decl () +Entering state 9 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 14 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 24 +Reading a token +Next token is token ';' () +Reducing stack 0 by rule 10 (line 84): + $1 = nterm expr () + $2 = token '=' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ')' () +Stack 0 Entering state 18 +Next token is token ')' () +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' () +Stack 1 Entering state 21 +Next token is token ')' () +On stack 0, shifting token ')' () +Stack 0 now in state 27 +On stack 1, shifting token ')' () +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token '+' () +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token '+' () +Stack 1 dies. +Removing dead stacks. +On stack 0, shifting token '+' () +Stack 0 now in state 15 +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () +Returning to deterministic operation. 
+Entering state 15 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 25 +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ')' () +Stack 0 Entering state 18 +Next token is token ')' () +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' () +Stack 1 Entering state 21 +Next token is token ')' () +On stack 0, shifting token ')' () +Stack 0 now in state 27 +On stack 1, shifting token ')' () +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token ';' () +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token ';' () +On stack 0, shifting token ';' () +Stack 0 now in state 16 +On stack 1, shifting token ';' () +Stack 1 now in state 23 +Stack 0 Entering state 16 +Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Stack 0 Entering state 7 +Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Stack 0 Entering state 1 +Reading a token +Next token is token TYPENAME () +Stack 1 Entering state 23 +Reduced stack 1 by rule 11 (line 87); action deferred. Now in state 9. +Stack 1 Entering state 9 +Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Stack 1 Entering state 7 +Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Merging stack 1 into stack 0. +Removing dead stacks. +On stack 0, shifting token TYPENAME () +Stack 0 now in state 4 +Reducing stack -1 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Reducing stack -1 by rule 14 (line 95): + $1 = token '(' () + $2 = nterm declarator () + $3 = token ')' () +-> $$ = nterm declarator () +Reducing stack -1 by rule 11 (line 87): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token ';' () +-> $$ = nterm decl () +Reducing stack -1 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () +Reducing stack -1 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Reducing stack -1 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Returning to deterministic operation. 
+Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ')' () +Stack 0 Entering state 18 +Next token is token ')' () +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' () +Stack 1 Entering state 21 +Next token is token ')' () +On stack 0, shifting token ')' () +Stack 0 now in state 27 +On stack 1, shifting token ')' () +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token '=' () +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token '=' () +On stack 0, shifting token '=' () +Stack 0 now in state 14 +On stack 1, shifting token '=' () +Stack 1 now in state 22 +Stack 0 Entering state 14 +Reading a token +Next token is token ID () +Stack 1 Entering state 22 +Next token is token ID () +On stack 0, shifting token ID () +Stack 0 now in state 5 +On stack 1, shifting token ID () +Stack 1 now in state 5 +Stack 0 Entering state 5 +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 24. +Stack 0 Entering state 24 +Reading a token +Next token is token '+' () +Stack 1 Entering state 5 +Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 29. +Stack 1 Entering state 29 +Next token is token '+' () +On stack 0, shifting token '+' () +Stack 0 now in state 15 +On stack 1, shifting token '+' () +Stack 1 now in state 15 +Stack 0 Entering state 15 +Reading a token +Next token is token ID () +Stack 1 Entering state 15 +Next token is token ID () +On stack 0, shifting token ID () +Stack 0 now in state 5 +On stack 1, shifting token ID () +Stack 1 now in state 5 +Stack 0 Entering state 5 +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 25. +Stack 0 Entering state 25 +Reduced stack 0 by rule 9 (line 83); action deferred. Now in state 24. +Stack 0 Entering state 24 +Reading a token +Next token is token ';' () +Reduced stack 0 by rule 10 (line 84); action deferred. Now in state 8. +Stack 0 Entering state 8 +Next token is token ';' () +Stack 1 Entering state 5 +Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 25. +Stack 1 Entering state 25 +Reduced stack 1 by rule 9 (line 83); action deferred. Now in state 29. +Stack 1 Entering state 29 +Next token is token ';' () +On stack 0, shifting token ';' () +Stack 0 now in state 16 +On stack 1, shifting token ';' () +Stack 1 now in state 30 +Stack 0 Entering state 16 +Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Stack 0 Entering state 7 +Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Stack 0 Entering state 1 +Reading a token +Next token is token TYPENAME () +Stack 1 Entering state 30 +Reduced stack 1 by rule 12 (line 89); action deferred. Now in state 9. +Stack 1 Entering state 9 +Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Stack 1 Entering state 7 +Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Merging stack 1 into stack 0. +Removing dead stacks. 
+On stack 0, shifting token TYPENAME () +Stack 0 now in state 4 +Reducing stack -1 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Reducing stack -1 by rule 14 (line 95): + $1 = token '(' () + $2 = nterm declarator () + $3 = token ')' () +-> $$ = nterm declarator () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Reducing stack -1 by rule 12 (line 89): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token '=' () + $4 = nterm expr () + $5 = token ';' () +-> $$ = nterm decl () +Reducing stack -1 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Reducing stack -1 by rule 10 (line 84): + $1 = nterm expr () + $2 = token '=' () + $3 = nterm expr () +-> $$ = nterm expr () +Reducing stack -1 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Reducing stack -1 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Returning to deterministic operation. +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ID () +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 20 +Next token is token ID () +syntax error, unexpected ID, expecting '=' or '+' or ')' +Error: popping nterm expr () +Error: popping token '(' () +Error: popping token TYPENAME () +Shifting token error () +Entering state 3 +Next token is token ID () +Error: discarding token ID () +Reading a token +Next token is token ')' () +Error: discarding token ')' () +Reading a token +Next token is token '=' () +Error: discarding token '=' () +Reading a token +Next token is token ID () +Error: discarding token ID () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token ID () +Error: discarding token ID () +Reading a token +Next token is token ';' () +Entering state 3 +Next token is token ';' () +Shifting token ';' () +Entering state 10 +Reducing stack 0 by rule 5 (line 76): + $1 = token error () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 15 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering 
state 25 +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token '@' () +Shifting token '@' () +Entering state 6 +Reducing stack 0 by rule 6 (line 77): + $1 = token '@' () +Cleanup: popping nterm prog () +./cxx-type.at:458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reducing stack 0 by rule 1 (line 64): +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 15 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 25 +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 11 +Reducing stack 0 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Entering state 13 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 23 +Reducing stack 0 by rule 11 (line 87): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token ';' () +-> $$ = nterm decl () +Entering state 9 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 11 +Reducing stack 0 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Entering state 13 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 22 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 29 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 30 +Reducing stack 0 by rule 12 (line 89): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token '=' () + $4 = nterm expr () + $5 = token ';' () +-> $$ = nterm decl () +Entering state 9 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = 
nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 14 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 24 +Reading a token +Next token is token ';' () +Reducing stack 0 by rule 10 (line 84): + $1 = nterm expr () + $2 = token '=' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ')' () +Stack 0 Entering state 18 +Next token is token ')' () +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' () +Stack 1 Entering state 21 +Next token is token ')' () +On stack 0, shifting token ')' () +Stack 0 now in state 27 +On stack 1, shifting token ')' () +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token '+' () +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token '+' () +Stack 1 dies. +Removing dead stacks. +On stack 0, shifting token '+' () +Stack 0 now in state 15 +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () +Returning to deterministic operation. 
+Entering state 15 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 25 +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ')' () +Stack 0 Entering state 18 +Next token is token ')' () +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' () +Stack 1 Entering state 21 +Next token is token ')' () +On stack 0, shifting token ')' () +Stack 0 now in state 27 +On stack 1, shifting token ')' () +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token ';' () +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token ';' () +On stack 0, shifting token ';' () +Stack 0 now in state 16 +On stack 1, shifting token ';' () +Stack 1 now in state 23 +Stack 0 Entering state 16 +Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Stack 0 Entering state 7 +Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Stack 0 Entering state 1 +Reading a token +Next token is token TYPENAME () +Stack 1 Entering state 23 +Reduced stack 1 by rule 11 (line 87); action deferred. Now in state 9. +Stack 1 Entering state 9 +Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Stack 1 Entering state 7 +Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Merging stack 1 into stack 0. +Removing dead stacks. +On stack 0, shifting token TYPENAME () +Stack 0 now in state 4 +Reducing stack -1 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Reducing stack -1 by rule 14 (line 95): + $1 = token '(' () + $2 = nterm declarator () + $3 = token ')' () +-> $$ = nterm declarator () +Reducing stack -1 by rule 11 (line 87): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token ';' () +-> $$ = nterm decl () +Reducing stack -1 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () +Reducing stack -1 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Reducing stack -1 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Returning to deterministic operation. 
+Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ')' () +Stack 0 Entering state 18 +Next token is token ')' () +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' () +Stack 1 Entering state 21 +Next token is token ')' () +On stack 0, shifting token ')' () +Stack 0 now in state 27 +On stack 1, shifting token ')' () +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token '=' () +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token '=' () +On stack 0, shifting token '=' () +Stack 0 now in state 14 +On stack 1, shifting token '=' () +Stack 1 now in state 22 +Stack 0 Entering state 14 +Reading a token +Next token is token ID () +Stack 1 Entering state 22 +Next token is token ID () +On stack 0, shifting token ID () +Stack 0 now in state 5 +On stack 1, shifting token ID () +Stack 1 now in state 5 +Stack 0 Entering state 5 +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 24. +Stack 0 Entering state 24 +Reading a token +Next token is token '+' () +Stack 1 Entering state 5 +Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 29. +Stack 1 Entering state 29 +Next token is token '+' () +On stack 0, shifting token '+' () +Stack 0 now in state 15 +On stack 1, shifting token '+' () +Stack 1 now in state 15 +Stack 0 Entering state 15 +Reading a token +Next token is token ID () +Stack 1 Entering state 15 +Next token is token ID () +On stack 0, shifting token ID () +Stack 0 now in state 5 +On stack 1, shifting token ID () +Stack 1 now in state 5 +Stack 0 Entering state 5 +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 25. +Stack 0 Entering state 25 +Reduced stack 0 by rule 9 (line 83); action deferred. Now in state 24. +Stack 0 Entering state 24 +Reading a token +Next token is token ';' () +Reduced stack 0 by rule 10 (line 84); action deferred. Now in state 8. +Stack 0 Entering state 8 +Next token is token ';' () +Stack 1 Entering state 5 +Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 25. +Stack 1 Entering state 25 +Reduced stack 1 by rule 9 (line 83); action deferred. Now in state 29. +Stack 1 Entering state 29 +Next token is token ';' () +On stack 0, shifting token ';' () +Stack 0 now in state 16 +On stack 1, shifting token ';' () +Stack 1 now in state 30 +Stack 0 Entering state 16 +Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Stack 0 Entering state 7 +Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Stack 0 Entering state 1 +Reading a token +Next token is token TYPENAME () +Stack 1 Entering state 30 +Reduced stack 1 by rule 12 (line 89); action deferred. Now in state 9. +Stack 1 Entering state 9 +Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Stack 1 Entering state 7 +Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Merging stack 1 into stack 0. +Removing dead stacks. 
+On stack 0, shifting token TYPENAME () +Stack 0 now in state 4 +Reducing stack -1 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Reducing stack -1 by rule 14 (line 95): + $1 = token '(' () + $2 = nterm declarator () + $3 = token ')' () +-> $$ = nterm declarator () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Reducing stack -1 by rule 12 (line 89): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token '=' () + $4 = nterm expr () + $5 = token ';' () +-> $$ = nterm decl () +Reducing stack -1 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Reducing stack -1 by rule 10 (line 84): + $1 = nterm expr () + $2 = token '=' () + $3 = nterm expr () +-> $$ = nterm expr () +Reducing stack -1 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Reducing stack -1 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Returning to deterministic operation. +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ID () +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 20 +Next token is token ID () +syntax error, unexpected ID, expecting '=' or '+' or ')' +Error: popping nterm expr () +Error: popping token '(' () +Error: popping token TYPENAME () +Shifting token error () +Entering state 3 +Next token is token ID () +Error: discarding token ID () +Reading a token +Next token is token ')' () +Error: discarding token ')' () +Reading a token +Next token is token '=' () +Error: discarding token '=' () +Reading a token +Next token is token ID () +Error: discarding token ID () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token ID () +Error: discarding token ID () +Reading a token +Next token is token ';' () +Entering state 3 +Next token is token ';' () +Shifting token ';' () +Entering state 10 +Reducing stack 0 by rule 5 (line 76): + $1 = token error () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 15 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering 
state 25 +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token '@' () +Shifting token '@' () +Entering state 6 +Reducing stack 0 by rule 6 (line 77): + $1 = token '@' () +Cleanup: popping nterm prog () +713. cxx-type.at:455: ok + +stderr: +stdout: +./c++.at:1555: ./check +./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y 719. glr-regression.at:356: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr2.cc ... ./glr-regression.at:356: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.cc glr-regr2a.y -./glr-regression.at:355: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.cc $LIBS +720. glr-regression.at:488: testing Improper merging of GLR delayed action sets: glr.c ... +./glr-regression.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.c glr-regr3.y +721. glr-regression.at:489: testing Improper merging of GLR delayed action sets: glr.cc ... +./glr-regression.at:489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.cc glr-regr3.y ./glr-regression.at:356: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.cc $LIBS +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./glr-regression.at:488: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr3 glr-regr3.c $LIBS +./glr-regression.at:489: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr3 glr-regr3.cc $LIBS stderr: stdout: -./c++.at:859: $PREPARSER ./input +./glr-regression.at:205: $PREPARSER ./glr-regr1 BPBPB stderr: -./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:205: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +714. glr-regression.at:205: ok + +stderr: +stdout: +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +722. glr-regression.at:490: testing Improper merging of GLR delayed action sets: glr2.cc ... +./glr-regression.at:490: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.cc glr-regr3.y +./glr-regression.at:490: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr3 glr-regr3.cc $LIBS +stderr: +stdout: +./c++.at:572: $here/modern +stdout: +Modern C++: 202100 +./c++.at:572: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +672. 
c++.at:572: ok +stderr: +stdout: +./c++.at:851: $PREPARSER ./input + +stderr: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +723. glr-regression.at:592: testing Duplicate representation of merged trees: %union { char *ptr; } glr.c ... +./glr-regression.at:592: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y stderr: stdout: -./glr-regression.at:206: $PREPARSER ./glr-regr1 BPBPB +./c++.at:571: $here/modern +./glr-regression.at:592: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS +stdout: +Modern C++: 202100 +./c++.at:571: $PREPARSER ./list stderr: -./glr-regression.at:206: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -715. glr-regression.at:206: ok +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +671. c++.at:571: ok +stdout: +./c++.at:1361: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaap +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input --debug aaaap +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x5733ea00->Object::Object { } +Next token is token 'a' (0x5733ea00 'a') +Shifting token 'a' (0x5733ea00 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5733ea00 'a') +-> $$ = nterm item (0x5733ea00 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x5733ea30->Object::Object { 0x5733ea00 } +Next token is token 'a' (0x5733ea30 'a') +Shifting token 'a' (0x5733ea30 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5733ea30 'a') +-> $$ = nterm item (0x5733ea30 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x5733ea60->Object::Object { 0x5733ea00, 0x5733ea30 } +Next token is token 'a' (0x5733ea60 'a') +Shifting token 'a' (0x5733ea60 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5733ea60 'a') +-> $$ = nterm item (0x5733ea60 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x5733ea90->Object::Object { 0x5733ea00, 0x5733ea30, 0x5733ea60 } +Next token is token 'a' (0x5733ea90 'a') +Shifting token 'a' (0x5733ea90 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5733ea90 'a') +-> $$ = nterm item (0x5733ea90 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token 
+0x5733eac0->Object::Object { 0x5733ea00, 0x5733ea30, 0x5733ea60, 0x5733ea90 } +Next token is token 'p' (0x5733eac0 'p'Exception caught: cleaning lookahead and stack +0x5733eac0->Object::~Object { 0x5733ea00, 0x5733ea30, 0x5733ea60, 0x5733ea90, 0x5733eac0 } +0x5733ea90->Object::~Object { 0x5733ea00, 0x5733ea30, 0x5733ea60, 0x5733ea90 } +0x5733ea60->Object::~Object { 0x5733ea00, 0x5733ea30, 0x5733ea60 } +0x5733ea30->Object::~Object { 0x5733ea00, 0x5733ea30 } +0x5733ea00->Object::~Object { 0x5733ea00 } +exception caught: printer +end { } +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x5733ea00->Object::Object { } +Next token is token 'a' (0x5733ea00 'a') +Shifting token 'a' (0x5733ea00 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5733ea00 'a') +-> $$ = nterm item (0x5733ea00 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x5733ea30->Object::Object { 0x5733ea00 } +Next token is token 'a' (0x5733ea30 'a') +Shifting token 'a' (0x5733ea30 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5733ea30 'a') +-> $$ = nterm item (0x5733ea30 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x5733ea60->Object::Object { 0x5733ea00, 0x5733ea30 } +Next token is token 'a' (0x5733ea60 'a') +Shifting token 'a' (0x5733ea60 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5733ea60 'a') +-> $$ = nterm item (0x5733ea60 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x5733ea90->Object::Object { 0x5733ea00, 0x5733ea30, 0x5733ea60 } +Next token is token 'a' (0x5733ea90 'a') +Shifting token 'a' (0x5733ea90 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x5733ea90 'a') +-> $$ = nterm item (0x5733ea90 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x5733eac0->Object::Object { 0x5733ea00, 0x5733ea30, 0x5733ea60, 0x5733ea90 } +Next token is token 'p' (0x5733eac0 'p'Exception caught: cleaning lookahead and stack +0x5733eac0->Object::~Object { 0x5733ea00, 0x5733ea30, 0x5733ea60, 0x5733ea90, 0x5733eac0 } +0x5733ea90->Object::~Object { 0x5733ea00, 0x5733ea30, 0x5733ea60, 0x5733ea90 } +0x5733ea60->Object::~Object { 0x5733ea00, 0x5733ea30, 0x5733ea60 } +0x5733ea30->Object::~Object { 0x5733ea00, 0x5733ea30 } +0x5733ea00->Object::~Object { 0x5733ea00 } +exception caught: printer +end { } +./c++.at:1361: grep '^exception caught: printer$' stderr +stdout: +exception caught: printer +./c++.at:1361: $PREPARSER ./input aaaae +stderr: +exception caught: syntax error +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +724. glr-regression.at:593: testing Duplicate representation of merged trees: %union { char *ptr; } glr.cc ... 
+./glr-regression.at:593: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y +./c++.at:1361: $PREPARSER ./input aaaaT +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaR +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./glr-regression.at:354: $PREPARSER ./glr-regr2a input1.txt stderr: +./glr-regression.at:593: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS ./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -720. glr-regression.at:488: testing Improper merging of GLR delayed action sets: glr.c ... -./glr-regression.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.c glr-regr3.y ./glr-regression.at:354: $PREPARSER ./glr-regr2a input2.txt stderr: +stderr: +stdout: +./c++.at:857: $PREPARSER ./input ./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./glr-regression.at:354: $PREPARSER ./glr-regr2a input3.txt stderr: +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS 717. glr-regression.at:354: ok - -./glr-regression.at:488: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr3 glr-regr3.c $LIBS -721. glr-regression.at:489: testing Improper merging of GLR delayed action sets: glr.cc ... 
-./glr-regression.at:489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.cc glr-regr3.y -./glr-regression.at:489: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr3 glr-regr3.cc $LIBS stderr: stdout: ./c++.at:1363: $PREPARSER ./input aaaas stderr: exception caught: reduction ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ./c++.at:1363: $PREPARSER ./input aaaal stderr: exception caught: yylex ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: ./c++.at:1363: $PREPARSER ./input i +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: exception caught: initial-action ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -269576,99 +268071,99 @@ Entering state 0 Stack now 0 Reading a token -0xffa5edef->Object::Object { } -0xffa5ee98->Object::Object { 0xffa5edef } -0xffa5edef->Object::~Object { 0xffa5edef, 0xffa5ee98 } -Next token is token 'a' (0xffa5ee98 'a') -0xffa5ede8->Object::Object { 0xffa5ee98 } -0xffa5ee98->Object::~Object { 0xffa5ede8, 0xffa5ee98 } -Shifting token 'a' (0xffa5ede8 'a') -0x578e93c4->Object::Object { 0xffa5ede8 } -0xffa5ede8->Object::~Object { 0x578e93c4, 0xffa5ede8 } +0xff92fa3f->Object::Object { } +0xff92fae8->Object::Object { 0xff92fa3f } +0xff92fa3f->Object::~Object { 0xff92fa3f, 0xff92fae8 } +Next token is token 'a' (0xff92fae8 'a') +0xff92fa38->Object::Object { 0xff92fae8 } +0xff92fae8->Object::~Object { 0xff92fa38, 0xff92fae8 } +Shifting token 'a' (0xff92fa38 'a') +0x567f83c4->Object::Object { 0xff92fa38 } +0xff92fa38->Object::~Object { 0x567f83c4, 0xff92fa38 } Entering state 1 Stack now 0 1 -0xffa5eea8->Object::Object { 0x578e93c4 } +0xff92faf8->Object::Object { 0x567f83c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x578e93c4 'a') --> $$ = nterm item (0xffa5eea8 'a') -0x578e93c4->Object::~Object { 0x578e93c4, 0xffa5eea8 } -0x578e93c4->Object::Object { 0xffa5eea8 } -0xffa5eea8->Object::~Object { 0x578e93c4, 0xffa5eea8 } + $1 = token 'a' (0x567f83c4 'a') +-> $$ = nterm item (0xff92faf8 'a') +0x567f83c4->Object::~Object { 0x567f83c4, 0xff92faf8 } +0x567f83c4->Object::Object { 0xff92faf8 } +0xff92faf8->Object::~Object { 0x567f83c4, 0xff92faf8 } Entering state 10 Stack now 0 10 Reading a token -0xffa5edef->Object::Object { 0x578e93c4 } -0xffa5ee98->Object::Object { 0x578e93c4, 0xffa5edef } -0xffa5edef->Object::~Object { 0x578e93c4, 0xffa5edef, 0xffa5ee98 } -Next token is token 'a' (0xffa5ee98 'a') -0xffa5ede8->Object::Object { 0x578e93c4, 0xffa5ee98 } -0xffa5ee98->Object::~Object { 0x578e93c4, 0xffa5ede8, 0xffa5ee98 } -Shifting token 'a' (0xffa5ede8 'a') -0x578e93d4->Object::Object { 0x578e93c4, 0xffa5ede8 } -0xffa5ede8->Object::~Object { 0x578e93c4, 0x578e93d4, 0xffa5ede8 } +0xff92fa3f->Object::Object { 0x567f83c4 } +0xff92fae8->Object::Object { 0x567f83c4, 0xff92fa3f } +0xff92fa3f->Object::~Object { 0x567f83c4, 0xff92fa3f, 0xff92fae8 } +Next token is token 'a' (0xff92fae8 'a') +0xff92fa38->Object::Object { 0x567f83c4, 0xff92fae8 } +0xff92fae8->Object::~Object { 0x567f83c4, 0xff92fa38, 0xff92fae8 } +Shifting token 'a' (0xff92fa38 'a') +0x567f83d4->Object::Object { 0x567f83c4, 0xff92fa38 } +0xff92fa38->Object::~Object { 0x567f83c4, 0x567f83d4, 0xff92fa38 } Entering state 1 Stack now 0 10 1 -0xffa5eea8->Object::Object { 0x578e93c4, 0x578e93d4 } +0xff92faf8->Object::Object { 0x567f83c4, 0x567f83d4 } Reducing stack by rule 4 (line 142): - $1 = 
token 'a' (0x578e93d4 'a') --> $$ = nterm item (0xffa5eea8 'a') -0x578e93d4->Object::~Object { 0x578e93c4, 0x578e93d4, 0xffa5eea8 } -0x578e93d4->Object::Object { 0x578e93c4, 0xffa5eea8 } -0xffa5eea8->Object::~Object { 0x578e93c4, 0x578e93d4, 0xffa5eea8 } + $1 = token 'a' (0x567f83d4 'a') +-> $$ = nterm item (0xff92faf8 'a') +0x567f83d4->Object::~Object { 0x567f83c4, 0x567f83d4, 0xff92faf8 } +0x567f83d4->Object::Object { 0x567f83c4, 0xff92faf8 } +0xff92faf8->Object::~Object { 0x567f83c4, 0x567f83d4, 0xff92faf8 } Entering state 10 Stack now 0 10 10 Reading a token -0xffa5edef->Object::Object { 0x578e93c4, 0x578e93d4 } -0xffa5ee98->Object::Object { 0x578e93c4, 0x578e93d4, 0xffa5edef } -0xffa5edef->Object::~Object { 0x578e93c4, 0x578e93d4, 0xffa5edef, 0xffa5ee98 } -Next token is token 'a' (0xffa5ee98 'a') -0xffa5ede8->Object::Object { 0x578e93c4, 0x578e93d4, 0xffa5ee98 } -0xffa5ee98->Object::~Object { 0x578e93c4, 0x578e93d4, 0xffa5ede8, 0xffa5ee98 } -Shifting token 'a' (0xffa5ede8 'a') -0x578e93e4->Object::Object { 0x578e93c4, 0x578e93d4, 0xffa5ede8 } -0xffa5ede8->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5ede8 } +0xff92fa3f->Object::Object { 0x567f83c4, 0x567f83d4 } +0xff92fae8->Object::Object { 0x567f83c4, 0x567f83d4, 0xff92fa3f } +0xff92fa3f->Object::~Object { 0x567f83c4, 0x567f83d4, 0xff92fa3f, 0xff92fae8 } +Next token is token 'a' (0xff92fae8 'a') +0xff92fa38->Object::Object { 0x567f83c4, 0x567f83d4, 0xff92fae8 } +0xff92fae8->Object::~Object { 0x567f83c4, 0x567f83d4, 0xff92fa38, 0xff92fae8 } +Shifting token 'a' (0xff92fa38 'a') +0x567f83e4->Object::Object { 0x567f83c4, 0x567f83d4, 0xff92fa38 } +0xff92fa38->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92fa38 } Entering state 1 Stack now 0 10 10 1 -0xffa5eea8->Object::Object { 0x578e93c4, 0x578e93d4, 0x578e93e4 } +0xff92faf8->Object::Object { 0x567f83c4, 0x567f83d4, 0x567f83e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x578e93e4 'a') --> $$ = nterm item (0xffa5eea8 'a') -0x578e93e4->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5eea8 } -0x578e93e4->Object::Object { 0x578e93c4, 0x578e93d4, 0xffa5eea8 } -0xffa5eea8->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5eea8 } + $1 = token 'a' (0x567f83e4 'a') +-> $$ = nterm item (0xff92faf8 'a') +0x567f83e4->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92faf8 } +0x567f83e4->Object::Object { 0x567f83c4, 0x567f83d4, 0xff92faf8 } +0xff92faf8->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92faf8 } Entering state 10 Stack now 0 10 10 10 Reading a token -0xffa5edef->Object::Object { 0x578e93c4, 0x578e93d4, 0x578e93e4 } -0xffa5ee98->Object::Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5edef } -0xffa5edef->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5edef, 0xffa5ee98 } -Next token is token 'a' (0xffa5ee98 'a') -0xffa5ede8->Object::Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5ee98 } -0xffa5ee98->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5ede8, 0xffa5ee98 } -Shifting token 'a' (0xffa5ede8 'a') -0x578e93f4->Object::Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5ede8 } -0xffa5ede8->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0x578e93f4, 0xffa5ede8 } +0xff92fa3f->Object::Object { 0x567f83c4, 0x567f83d4, 0x567f83e4 } +0xff92fae8->Object::Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92fa3f } +0xff92fa3f->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92fa3f, 0xff92fae8 } +Next token is token 'a' (0xff92fae8 'a') +0xff92fa38->Object::Object { 
0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92fae8 } +0xff92fae8->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92fa38, 0xff92fae8 } +Shifting token 'a' (0xff92fa38 'a') +0x567f83f4->Object::Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92fa38 } +0xff92fa38->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0x567f83f4, 0xff92fa38 } Entering state 1 Stack now 0 10 10 10 1 -0xffa5eea8->Object::Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0x578e93f4 } +0xff92faf8->Object::Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0x567f83f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x578e93f4 'a') --> $$ = nterm item (0xffa5eea8 'a') -0x578e93f4->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0x578e93f4, 0xffa5eea8 } -0x578e93f4->Object::Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5eea8 } -0xffa5eea8->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0x578e93f4, 0xffa5eea8 } + $1 = token 'a' (0x567f83f4 'a') +-> $$ = nterm item (0xff92faf8 'a') +0x567f83f4->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0x567f83f4, 0xff92faf8 } +0x567f83f4->Object::Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92faf8 } +0xff92faf8->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0x567f83f4, 0xff92faf8 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0xffa5edef->Object::Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0x578e93f4 } -0xffa5ee98->Object::Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0x578e93f4, 0xffa5edef } -0xffa5edef->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0x578e93f4, 0xffa5edef, 0xffa5ee98 } -Next token is token 'p' (0xffa5ee98 'p'Exception caught: cleaning lookahead and stack -0x578e93f4->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0x578e93f4, 0xffa5ee98 } -0x578e93e4->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5ee98 } -0x578e93d4->Object::~Object { 0x578e93c4, 0x578e93d4, 0xffa5ee98 } -0x578e93c4->Object::~Object { 0x578e93c4, 0xffa5ee98 } -0xffa5ee98->Object::~Object { 0xffa5ee98 } +0xff92fa3f->Object::Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0x567f83f4 } +0xff92fae8->Object::Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0x567f83f4, 0xff92fa3f } +0xff92fa3f->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0x567f83f4, 0xff92fa3f, 0xff92fae8 } +Next token is token 'p' (0xff92fae8 'p'Exception caught: cleaning lookahead and stack +0x567f83f4->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0x567f83f4, 0xff92fae8 } +0x567f83e4->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92fae8 } +0x567f83d4->Object::~Object { 0x567f83c4, 0x567f83d4, 0xff92fae8 } +0x567f83c4->Object::~Object { 0x567f83c4, 0xff92fae8 } +0xff92fae8->Object::~Object { 0xff92fae8 } exception caught: printer end { } ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -269677,99 +268172,99 @@ Entering state 0 Stack now 0 Reading a token -0xffa5edef->Object::Object { } -0xffa5ee98->Object::Object { 0xffa5edef } -0xffa5edef->Object::~Object { 0xffa5edef, 0xffa5ee98 } -Next token is token 'a' (0xffa5ee98 'a') -0xffa5ede8->Object::Object { 0xffa5ee98 } -0xffa5ee98->Object::~Object { 0xffa5ede8, 0xffa5ee98 } -Shifting token 'a' (0xffa5ede8 'a') -0x578e93c4->Object::Object { 0xffa5ede8 } -0xffa5ede8->Object::~Object { 0x578e93c4, 0xffa5ede8 } +0xff92fa3f->Object::Object { } +0xff92fae8->Object::Object { 0xff92fa3f } +0xff92fa3f->Object::~Object { 0xff92fa3f, 0xff92fae8 } +Next token is token 'a' (0xff92fae8 'a') +0xff92fa38->Object::Object { 0xff92fae8 } +0xff92fae8->Object::~Object { 
0xff92fa38, 0xff92fae8 } +Shifting token 'a' (0xff92fa38 'a') +0x567f83c4->Object::Object { 0xff92fa38 } +0xff92fa38->Object::~Object { 0x567f83c4, 0xff92fa38 } Entering state 1 Stack now 0 1 -0xffa5eea8->Object::Object { 0x578e93c4 } +0xff92faf8->Object::Object { 0x567f83c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x578e93c4 'a') --> $$ = nterm item (0xffa5eea8 'a') -0x578e93c4->Object::~Object { 0x578e93c4, 0xffa5eea8 } -0x578e93c4->Object::Object { 0xffa5eea8 } -0xffa5eea8->Object::~Object { 0x578e93c4, 0xffa5eea8 } + $1 = token 'a' (0x567f83c4 'a') +-> $$ = nterm item (0xff92faf8 'a') +0x567f83c4->Object::~Object { 0x567f83c4, 0xff92faf8 } +0x567f83c4->Object::Object { 0xff92faf8 } +0xff92faf8->Object::~Object { 0x567f83c4, 0xff92faf8 } Entering state 10 Stack now 0 10 Reading a token -0xffa5edef->Object::Object { 0x578e93c4 } -0xffa5ee98->Object::Object { 0x578e93c4, 0xffa5edef } -0xffa5edef->Object::~Object { 0x578e93c4, 0xffa5edef, 0xffa5ee98 } -Next token is token 'a' (0xffa5ee98 'a') -0xffa5ede8->Object::Object { 0x578e93c4, 0xffa5ee98 } -0xffa5ee98->Object::~Object { 0x578e93c4, 0xffa5ede8, 0xffa5ee98 } -Shifting token 'a' (0xffa5ede8 'a') -0x578e93d4->Object::Object { 0x578e93c4, 0xffa5ede8 } -0xffa5ede8->Object::~Object { 0x578e93c4, 0x578e93d4, 0xffa5ede8 } +0xff92fa3f->Object::Object { 0x567f83c4 } +0xff92fae8->Object::Object { 0x567f83c4, 0xff92fa3f } +0xff92fa3f->Object::~Object { 0x567f83c4, 0xff92fa3f, 0xff92fae8 } +Next token is token 'a' (0xff92fae8 'a') +0xff92fa38->Object::Object { 0x567f83c4, 0xff92fae8 } +0xff92fae8->Object::~Object { 0x567f83c4, 0xff92fa38, 0xff92fae8 } +Shifting token 'a' (0xff92fa38 'a') +0x567f83d4->Object::Object { 0x567f83c4, 0xff92fa38 } +0xff92fa38->Object::~Object { 0x567f83c4, 0x567f83d4, 0xff92fa38 } Entering state 1 Stack now 0 10 1 -0xffa5eea8->Object::Object { 0x578e93c4, 0x578e93d4 } +0xff92faf8->Object::Object { 0x567f83c4, 0x567f83d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x578e93d4 'a') --> $$ = nterm item (0xffa5eea8 'a') -0x578e93d4->Object::~Object { 0x578e93c4, 0x578e93d4, 0xffa5eea8 } -0x578e93d4->Object::Object { 0x578e93c4, 0xffa5eea8 } -0xffa5eea8->Object::~Object { 0x578e93c4, 0x578e93d4, 0xffa5eea8 } + $1 = token 'a' (0x567f83d4 'a') +-> $$ = nterm item (0xff92faf8 'a') +0x567f83d4->Object::~Object { 0x567f83c4, 0x567f83d4, 0xff92faf8 } +0x567f83d4->Object::Object { 0x567f83c4, 0xff92faf8 } +0xff92faf8->Object::~Object { 0x567f83c4, 0x567f83d4, 0xff92faf8 } Entering state 10 Stack now 0 10 10 Reading a token -0xffa5edef->Object::Object { 0x578e93c4, 0x578e93d4 } -0xffa5ee98->Object::Object { 0x578e93c4, 0x578e93d4, 0xffa5edef } -0xffa5edef->Object::~Object { 0x578e93c4, 0x578e93d4, 0xffa5edef, 0xffa5ee98 } -Next token is token 'a' (0xffa5ee98 'a') -0xffa5ede8->Object::Object { 0x578e93c4, 0x578e93d4, 0xffa5ee98 } -0xffa5ee98->Object::~Object { 0x578e93c4, 0x578e93d4, 0xffa5ede8, 0xffa5ee98 } -Shifting token 'a' (0xffa5ede8 'a') -0x578e93e4->Object::Object { 0x578e93c4, 0x578e93d4, 0xffa5ede8 } -0xffa5ede8->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5ede8 } +0xff92fa3f->Object::Object { 0x567f83c4, 0x567f83d4 } +0xff92fae8->Object::Object { 0x567f83c4, 0x567f83d4, 0xff92fa3f } +0xff92fa3f->Object::~Object { 0x567f83c4, 0x567f83d4, 0xff92fa3f, 0xff92fae8 } +Next token is token 'a' (0xff92fae8 'a') +0xff92fa38->Object::Object { 0x567f83c4, 0x567f83d4, 0xff92fae8 } +0xff92fae8->Object::~Object { 0x567f83c4, 0x567f83d4, 0xff92fa38, 0xff92fae8 } +Shifting token 'a' (0xff92fa38 
'a') +0x567f83e4->Object::Object { 0x567f83c4, 0x567f83d4, 0xff92fa38 } +0xff92fa38->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92fa38 } Entering state 1 Stack now 0 10 10 1 -0xffa5eea8->Object::Object { 0x578e93c4, 0x578e93d4, 0x578e93e4 } +0xff92faf8->Object::Object { 0x567f83c4, 0x567f83d4, 0x567f83e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x578e93e4 'a') --> $$ = nterm item (0xffa5eea8 'a') -0x578e93e4->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5eea8 } -0x578e93e4->Object::Object { 0x578e93c4, 0x578e93d4, 0xffa5eea8 } -0xffa5eea8->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5eea8 } + $1 = token 'a' (0x567f83e4 'a') +-> $$ = nterm item (0xff92faf8 'a') +0x567f83e4->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92faf8 } +0x567f83e4->Object::Object { 0x567f83c4, 0x567f83d4, 0xff92faf8 } +0xff92faf8->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92faf8 } Entering state 10 Stack now 0 10 10 10 Reading a token -0xffa5edef->Object::Object { 0x578e93c4, 0x578e93d4, 0x578e93e4 } -0xffa5ee98->Object::Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5edef } -0xffa5edef->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5edef, 0xffa5ee98 } -Next token is token 'a' (0xffa5ee98 'a') -0xffa5ede8->Object::Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5ee98 } -0xffa5ee98->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5ede8, 0xffa5ee98 } -Shifting token 'a' (0xffa5ede8 'a') -0x578e93f4->Object::Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5ede8 } -0xffa5ede8->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0x578e93f4, 0xffa5ede8 } +0xff92fa3f->Object::Object { 0x567f83c4, 0x567f83d4, 0x567f83e4 } +0xff92fae8->Object::Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92fa3f } +0xff92fa3f->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92fa3f, 0xff92fae8 } +Next token is token 'a' (0xff92fae8 'a') +0xff92fa38->Object::Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92fae8 } +0xff92fae8->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92fa38, 0xff92fae8 } +Shifting token 'a' (0xff92fa38 'a') +0x567f83f4->Object::Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92fa38 } +0xff92fa38->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0x567f83f4, 0xff92fa38 } Entering state 1 Stack now 0 10 10 10 1 -0xffa5eea8->Object::Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0x578e93f4 } +0xff92faf8->Object::Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0x567f83f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x578e93f4 'a') --> $$ = nterm item (0xffa5eea8 'a') -0x578e93f4->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0x578e93f4, 0xffa5eea8 } -0x578e93f4->Object::Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5eea8 } -0xffa5eea8->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0x578e93f4, 0xffa5eea8 } + $1 = token 'a' (0x567f83f4 'a') +-> $$ = nterm item (0xff92faf8 'a') +0x567f83f4->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0x567f83f4, 0xff92faf8 } +0x567f83f4->Object::Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92faf8 } +0xff92faf8->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0x567f83f4, 0xff92faf8 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0xffa5edef->Object::Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0x578e93f4 } -0xffa5ee98->Object::Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0x578e93f4, 0xffa5edef } -0xffa5edef->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0x578e93f4, 0xffa5edef, 
0xffa5ee98 } -Next token is token 'p' (0xffa5ee98 'p'Exception caught: cleaning lookahead and stack -0x578e93f4->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0x578e93f4, 0xffa5ee98 } -0x578e93e4->Object::~Object { 0x578e93c4, 0x578e93d4, 0x578e93e4, 0xffa5ee98 } -0x578e93d4->Object::~Object { 0x578e93c4, 0x578e93d4, 0xffa5ee98 } -0x578e93c4->Object::~Object { 0x578e93c4, 0xffa5ee98 } -0xffa5ee98->Object::~Object { 0xffa5ee98 } +0xff92fa3f->Object::Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0x567f83f4 } +0xff92fae8->Object::Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0x567f83f4, 0xff92fa3f } +0xff92fa3f->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0x567f83f4, 0xff92fa3f, 0xff92fae8 } +Next token is token 'p' (0xff92fae8 'p'Exception caught: cleaning lookahead and stack +0x567f83f4->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0x567f83f4, 0xff92fae8 } +0x567f83e4->Object::~Object { 0x567f83c4, 0x567f83d4, 0x567f83e4, 0xff92fae8 } +0x567f83d4->Object::~Object { 0x567f83c4, 0x567f83d4, 0xff92fae8 } +0x567f83c4->Object::~Object { 0x567f83c4, 0xff92fae8 } +0xff92fae8->Object::~Object { 0xff92fae8 } exception caught: printer end { } ./c++.at:1363: grep '^exception caught: printer$' stderr @@ -269780,9 +268275,11 @@ exception caught: syntax error ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input aaaaE +725. glr-regression.at:594: testing Duplicate representation of merged trees: %union { char *ptr; } glr2.cc ... stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:594: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y ./c++.at:1363: $PREPARSER ./input aaaaT stderr: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -269793,9 +268290,55 @@ ./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: +./glr-regression.at:488: $PREPARSER ./glr-regr3 input.txt +stderr: +./glr-regression.at:488: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +720. glr-regression.at:488: ok +./glr-regression.at:594: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS + +stderr: +stdout: +./glr-regression.at:206: $PREPARSER ./glr-regr1 BPBPB +stderr: +./glr-regression.at:206: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +715. glr-regression.at:206: ok + +726. glr-regression.at:596: testing Duplicate representation of merged trees: api.value.type=union glr.c ... 
+./glr-regression.at:596: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y +stderr: +stdout: +stderr: +./c++.at:569: $here/modern +stdout: ./c++.at:1362: $PREPARSER ./input aaaas +stdout: +Modern C++: 202100 +./c++.at:569: $PREPARSER ./list stderr: exception caught: reduction +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaal stderr: @@ -269805,108 +268348,116 @@ stderr: exception caught: initial-action ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +669. c++.at:569: ok ./c++.at:1362: $PREPARSER ./input aaaap stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:596: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS +stderr: ./c++.at:1362: $PREPARSER ./input --debug aaaap + +stdout: +727. glr-regression.at:597: testing Duplicate representation of merged trees: api.value.type=union glr.cc ... +689. c++.at:1371: ok +./glr-regression.at:597: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xfff9092f->Object::Object { } -0xfff909d8->Object::Object { 0xfff9092f } -0xfff9092f->Object::~Object { 0xfff9092f, 0xfff909d8 } -Next token is token 'a' (0xfff909d8 'a') -0xfff90928->Object::Object { 0xfff909d8 } -0xfff909d8->Object::~Object { 0xfff90928, 0xfff909d8 } -Shifting token 'a' (0xfff90928 'a') -0x57bcb3c4->Object::Object { 0xfff90928 } -0xfff90928->Object::~Object { 0x57bcb3c4, 0xfff90928 } +0xff8925af->Object::Object { } +0xff892658->Object::Object { 0xff8925af } +0xff8925af->Object::~Object { 0xff8925af, 0xff892658 } +Next token is token 'a' (0xff892658 'a') +0xff8925a8->Object::Object { 0xff892658 } +0xff892658->Object::~Object { 0xff8925a8, 0xff892658 } +Shifting token 'a' (0xff8925a8 'a') +0x578ea3c4->Object::Object { 0xff8925a8 } +0xff8925a8->Object::~Object { 0x578ea3c4, 0xff8925a8 } Entering state 2 Stack now 0 2 -0xfff909e8->Object::Object { 0x57bcb3c4 } +0xff892668->Object::Object { 0x578ea3c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57bcb3c4 'a') --> $$ = nterm item (0xfff909e8 'a') -0x57bcb3c4->Object::~Object { 0x57bcb3c4, 0xfff909e8 } -0x57bcb3c4->Object::Object { 0xfff909e8 } -0xfff909e8->Object::~Object { 0x57bcb3c4, 0xfff909e8 } + $1 = token 'a' (0x578ea3c4 'a') +-> $$ = nterm item (0xff892668 'a') +0x578ea3c4->Object::~Object { 0x578ea3c4, 0xff892668 } +0x578ea3c4->Object::Object { 0xff892668 } +0xff892668->Object::~Object { 0x578ea3c4, 0xff892668 } Entering state 11 Stack now 0 11 Reading a token -0xfff9092f->Object::Object { 0x57bcb3c4 } -0xfff909d8->Object::Object { 0x57bcb3c4, 0xfff9092f } -0xfff9092f->Object::~Object { 0x57bcb3c4, 0xfff9092f, 0xfff909d8 } -Next token is token 'a' (0xfff909d8 'a') -0xfff90928->Object::Object { 0x57bcb3c4, 0xfff909d8 } -0xfff909d8->Object::~Object { 0x57bcb3c4, 0xfff90928, 0xfff909d8 
} -Shifting token 'a' (0xfff90928 'a') -0x57bcb3d4->Object::Object { 0x57bcb3c4, 0xfff90928 } -0xfff90928->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0xfff90928 } +0xff8925af->Object::Object { 0x578ea3c4 } +0xff892658->Object::Object { 0x578ea3c4, 0xff8925af } +0xff8925af->Object::~Object { 0x578ea3c4, 0xff8925af, 0xff892658 } +Next token is token 'a' (0xff892658 'a') +0xff8925a8->Object::Object { 0x578ea3c4, 0xff892658 } +0xff892658->Object::~Object { 0x578ea3c4, 0xff8925a8, 0xff892658 } +Shifting token 'a' (0xff8925a8 'a') +0x578ea3d4->Object::Object { 0x578ea3c4, 0xff8925a8 } +0xff8925a8->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0xff8925a8 } Entering state 2 Stack now 0 11 2 -0xfff909e8->Object::Object { 0x57bcb3c4, 0x57bcb3d4 } +0xff892668->Object::Object { 0x578ea3c4, 0x578ea3d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57bcb3d4 'a') --> $$ = nterm item (0xfff909e8 'a') -0x57bcb3d4->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0xfff909e8 } -0x57bcb3d4->Object::Object { 0x57bcb3c4, 0xfff909e8 } -0xfff909e8->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0xfff909e8 } + $1 = token 'a' (0x578ea3d4 'a') +-> $$ = nterm item (0xff892668 'a') +0x578ea3d4->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0xff892668 } +0x578ea3d4->Object::Object { 0x578ea3c4, 0xff892668 } +0xff892668->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0xff892668 } Entering state 11 Stack now 0 11 11 Reading a token -0xfff9092f->Object::Object { 0x57bcb3c4, 0x57bcb3d4 } -0xfff909d8->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0xfff9092f } -0xfff9092f->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0xfff9092f, 0xfff909d8 } -Next token is token 'a' (0xfff909d8 'a') -0xfff90928->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0xfff909d8 } -0xfff909d8->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0xfff90928, 0xfff909d8 } -Shifting token 'a' (0xfff90928 'a') -0x57bcb3e4->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0xfff90928 } -0xfff90928->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0xfff90928 } +0xff8925af->Object::Object { 0x578ea3c4, 0x578ea3d4 } +0xff892658->Object::Object { 0x578ea3c4, 0x578ea3d4, 0xff8925af } +0xff8925af->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0xff8925af, 0xff892658 } +Next token is token 'a' (0xff892658 'a') +0xff8925a8->Object::Object { 0x578ea3c4, 0x578ea3d4, 0xff892658 } +0xff892658->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0xff8925a8, 0xff892658 } +Shifting token 'a' (0xff8925a8 'a') +0x578ea3e4->Object::Object { 0x578ea3c4, 0x578ea3d4, 0xff8925a8 } +0xff8925a8->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff8925a8 } Entering state 2 Stack now 0 11 11 2 -0xfff909e8->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4 } +0xff892668->Object::Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57bcb3e4 'a') --> $$ = nterm item (0xfff909e8 'a') -0x57bcb3e4->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0xfff909e8 } -0x57bcb3e4->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0xfff909e8 } -0xfff909e8->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0xfff909e8 } + $1 = token 'a' (0x578ea3e4 'a') +-> $$ = nterm item (0xff892668 'a') +0x578ea3e4->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff892668 } +0x578ea3e4->Object::Object { 0x578ea3c4, 0x578ea3d4, 0xff892668 } +0xff892668->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff892668 } Entering state 11 Stack now 0 11 11 11 Reading a token -0xfff9092f->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4 } -0xfff909d8->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 
0xfff9092f } -0xfff9092f->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0xfff9092f, 0xfff909d8 } -Next token is token 'a' (0xfff909d8 'a') -0xfff90928->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0xfff909d8 } -0xfff909d8->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0xfff90928, 0xfff909d8 } -Shifting token 'a' (0xfff90928 'a') -0x57bcb3f4->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0xfff90928 } -0xfff90928->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0x57bcb3f4, 0xfff90928 } +0xff8925af->Object::Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4 } +0xff892658->Object::Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff8925af } +0xff8925af->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff8925af, 0xff892658 } +Next token is token 'a' (0xff892658 'a') +0xff8925a8->Object::Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff892658 } +0xff892658->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff8925a8, 0xff892658 } +Shifting token 'a' (0xff8925a8 'a') +0x578ea3f4->Object::Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff8925a8 } +0xff8925a8->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0x578ea3f4, 0xff8925a8 } Entering state 2 Stack now 0 11 11 11 2 -0xfff909e8->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0x57bcb3f4 } +0xff892668->Object::Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0x578ea3f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57bcb3f4 'a') --> $$ = nterm item (0xfff909e8 'a') -0x57bcb3f4->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0x57bcb3f4, 0xfff909e8 } -0x57bcb3f4->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0xfff909e8 } -0xfff909e8->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0x57bcb3f4, 0xfff909e8 } + $1 = token 'a' (0x578ea3f4 'a') +-> $$ = nterm item (0xff892668 'a') +0x578ea3f4->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0x578ea3f4, 0xff892668 } +0x578ea3f4->Object::Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff892668 } +0xff892668->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0x578ea3f4, 0xff892668 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0xfff9092f->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0x57bcb3f4 } -0xfff909d8->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0x57bcb3f4, 0xfff9092f } -0xfff9092f->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0x57bcb3f4, 0xfff9092f, 0xfff909d8 } -Next token is token 'p' (0xfff909d8 'p'Exception caught: cleaning lookahead and stack -0x57bcb3f4->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0x57bcb3f4, 0xfff909d8 } -0x57bcb3e4->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0xfff909d8 } -0x57bcb3d4->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0xfff909d8 } -0x57bcb3c4->Object::~Object { 0x57bcb3c4, 0xfff909d8 } -0xfff909d8->Object::~Object { 0xfff909d8 } +0xff8925af->Object::Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0x578ea3f4 } +0xff892658->Object::Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0x578ea3f4, 0xff8925af } +0xff8925af->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0x578ea3f4, 0xff8925af, 0xff892658 } +Next token is token 'p' (0xff892658 'p'Exception caught: cleaning lookahead and stack +0x578ea3f4->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0x578ea3f4, 0xff892658 } +0x578ea3e4->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff892658 } +0x578ea3d4->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0xff892658 } +0x578ea3c4->Object::~Object { 0x578ea3c4, 0xff892658 } +0xff892658->Object::~Object { 0xff892658 } 
exception caught: printer end { } ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -269915,103 +268466,104 @@ Entering state 0 Stack now 0 Reading a token -0xfff9092f->Object::Object { } -0xfff909d8->Object::Object { 0xfff9092f } -0xfff9092f->Object::~Object { 0xfff9092f, 0xfff909d8 } -Next token is token 'a' (0xfff909d8 'a') -0xfff90928->Object::Object { 0xfff909d8 } -0xfff909d8->Object::~Object { 0xfff90928, 0xfff909d8 } -Shifting token 'a' (0xfff90928 'a') -0x57bcb3c4->Object::Object { 0xfff90928 } -0xfff90928->Object::~Object { 0x57bcb3c4, 0xfff90928 } +0xff8925af->Object::Object { } +0xff892658->Object::Object { 0xff8925af } +0xff8925af->Object::~Object { 0xff8925af, 0xff892658 } +Next token is token 'a' (0xff892658 'a') +0xff8925a8->Object::Object { 0xff892658 } +0xff892658->Object::~Object { 0xff8925a8, 0xff892658 } +Shifting token 'a' (0xff8925a8 'a') +0x578ea3c4->Object::Object { 0xff8925a8 } +0xff8925a8->Object::~Object { 0x578ea3c4, 0xff8925a8 } Entering state 2 Stack now 0 2 -0xfff909e8->Object::Object { 0x57bcb3c4 } +0xff892668->Object::Object { 0x578ea3c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57bcb3c4 'a') --> $$ = nterm item (0xfff909e8 'a') -0x57bcb3c4->Object::~Object { 0x57bcb3c4, 0xfff909e8 } -0x57bcb3c4->Object::Object { 0xfff909e8 } -0xfff909e8->Object::~Object { 0x57bcb3c4, 0xfff909e8 } + $1 = token 'a' (0x578ea3c4 'a') +-> $$ = nterm item (0xff892668 'a') +0x578ea3c4->Object::~Object { 0x578ea3c4, 0xff892668 } +0x578ea3c4->Object::Object { 0xff892668 } +0xff892668->Object::~Object { 0x578ea3c4, 0xff892668 } Entering state 11 Stack now 0 11 Reading a token -0xfff9092f->Object::Object { 0x57bcb3c4 } -0xfff909d8->Object::Object { 0x57bcb3c4, 0xfff9092f } -0xfff9092f->Object::~Object { 0x57bcb3c4, 0xfff9092f, 0xfff909d8 } -Next token is token 'a' (0xfff909d8 'a') -0xfff90928->Object::Object { 0x57bcb3c4, 0xfff909d8 } -0xfff909d8->Object::~Object { 0x57bcb3c4, 0xfff90928, 0xfff909d8 } -Shifting token 'a' (0xfff90928 'a') -0x57bcb3d4->Object::Object { 0x57bcb3c4, 0xfff90928 } -0xfff90928->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0xfff90928 } +0xff8925af->Object::Object { 0x578ea3c4 } +0xff892658->Object::Object { 0x578ea3c4, 0xff8925af } +0xff8925af->Object::~Object { 0x578ea3c4, 0xff8925af, 0xff892658 } +Next token is token 'a' (0xff892658 'a') +0xff8925a8->Object::Object { 0x578ea3c4, 0xff892658 } +0xff892658->Object::~Object { 0x578ea3c4, 0xff8925a8, 0xff892658 } +Shifting token 'a' (0xff8925a8 'a') +0x578ea3d4->Object::Object { 0x578ea3c4, 0xff8925a8 } +0xff8925a8->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0xff8925a8 } Entering state 2 Stack now 0 11 2 -0xfff909e8->Object::Object { 0x57bcb3c4, 0x57bcb3d4 } +0xff892668->Object::Object { 0x578ea3c4, 0x578ea3d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57bcb3d4 'a') --> $$ = nterm item (0xfff909e8 'a') -0x57bcb3d4->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0xfff909e8 } -0x57bcb3d4->Object::Object { 0x57bcb3c4, 0xfff909e8 } -0xfff909e8->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0xfff909e8 } + $1 = token 'a' (0x578ea3d4 'a') +-> $$ = nterm item (0xff892668 'a') +0x578ea3d4->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0xff892668 } +0x578ea3d4->Object::Object { 0x578ea3c4, 0xff892668 } +0xff892668->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0xff892668 } Entering state 11 Stack now 0 11 11 Reading a token -0xfff9092f->Object::Object { 0x57bcb3c4, 0x57bcb3d4 } -0xfff909d8->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0xfff9092f } -0xfff9092f->Object::~Object { 
0x57bcb3c4, 0x57bcb3d4, 0xfff9092f, 0xfff909d8 } -Next token is token 'a' (0xfff909d8 'a') -0xfff90928->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0xfff909d8 } -0xfff909d8->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0xfff90928, 0xfff909d8 } -Shifting token 'a' (0xfff90928 'a') -0x57bcb3e4->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0xfff90928 } -0xfff90928->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0xfff90928 } +0xff8925af->Object::Object { 0x578ea3c4, 0x578ea3d4 } +0xff892658->Object::Object { 0x578ea3c4, 0x578ea3d4, 0xff8925af } +0xff8925af->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0xff8925af, 0xff892658 } +Next token is token 'a' (0xff892658 'a') +0xff8925a8->Object::Object { 0x578ea3c4, 0x578ea3d4, 0xff892658 } +0xff892658->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0xff8925a8, 0xff892658 } +Shifting token 'a' (0xff8925a8 'a') +0x578ea3e4->Object::Object { 0x578ea3c4, 0x578ea3d4, 0xff8925a8 } +0xff8925a8->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff8925a8 } Entering state 2 Stack now 0 11 11 2 -0xfff909e8->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4 } +0xff892668->Object::Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57bcb3e4 'a') --> $$ = nterm item (0xfff909e8 'a') -0x57bcb3e4->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0xfff909e8 } -0x57bcb3e4->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0xfff909e8 } -0xfff909e8->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0xfff909e8 } + $1 = token 'a' (0x578ea3e4 'a') +-> $$ = nterm item (0xff892668 'a') +0x578ea3e4->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff892668 } +0x578ea3e4->Object::Object { 0x578ea3c4, 0x578ea3d4, 0xff892668 } +0xff892668->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff892668 } Entering state 11 Stack now 0 11 11 11 Reading a token -0xfff9092f->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4 } -0xfff909d8->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0xfff9092f } -0xfff9092f->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0xfff9092f, 0xfff909d8 } -Next token is token 'a' (0xfff909d8 'a') -0xfff90928->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0xfff909d8 } -0xfff909d8->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0xfff90928, 0xfff909d8 } -Shifting token 'a' (0xfff90928 'a') -0x57bcb3f4->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0xfff90928 } -0xfff90928->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0x57bcb3f4, 0xfff90928 } +0xff8925af->Object::Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4 } +0xff892658->Object::Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff8925af } +0xff8925af->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff8925af, 0xff892658 } +Next token is token 'a' (0xff892658 'a') +0xff8925a8->Object::Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff892658 } +0xff892658->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff8925a8, 0xff892658 } +Shifting token 'a' (0xff8925a8 'a') +0x578ea3f4->Object::Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff8925a8 } +0xff8925a8->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0x578ea3f4, 0xff8925a8 } Entering state 2 Stack now 0 11 11 11 2 -0xfff909e8->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0x57bcb3f4 } +0xff892668->Object::Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0x578ea3f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57bcb3f4 'a') --> $$ = nterm item (0xfff909e8 'a') -0x57bcb3f4->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 
0x57bcb3f4, 0xfff909e8 } -0x57bcb3f4->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0xfff909e8 } -0xfff909e8->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0x57bcb3f4, 0xfff909e8 } + $1 = token 'a' (0x578ea3f4 'a') +-> $$ = nterm item (0xff892668 'a') +0x578ea3f4->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0x578ea3f4, 0xff892668 } +0x578ea3f4->Object::Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff892668 } +0xff892668->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0x578ea3f4, 0xff892668 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0xfff9092f->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0x57bcb3f4 } -0xfff909d8->Object::Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0x57bcb3f4, 0xfff9092f } -0xfff9092f->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0x57bcb3f4, 0xfff9092f, 0xfff909d8 } -Next token is token 'p' (0xfff909d8 'p'Exception caught: cleaning lookahead and stack -0x57bcb3f4->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0x57bcb3f4, 0xfff909d8 } -0x57bcb3e4->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0x57bcb3e4, 0xfff909d8 } -0x57bcb3d4->Object::~Object { 0x57bcb3c4, 0x57bcb3d4, 0xfff909d8 } -0x57bcb3c4->Object::~Object { 0x57bcb3c4, 0xfff909d8 } -0xfff909d8->Object::~Object { 0xfff909d8 } +0xff8925af->Object::Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0x578ea3f4 } +0xff892658->Object::Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0x578ea3f4, 0xff8925af } +0xff8925af->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0x578ea3f4, 0xff8925af, 0xff892658 } +Next token is token 'p' (0xff892658 'p'Exception caught: cleaning lookahead and stack +0x578ea3f4->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0x578ea3f4, 0xff892658 } +0x578ea3e4->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0x578ea3e4, 0xff892658 } +0x578ea3d4->Object::~Object { 0x578ea3c4, 0x578ea3d4, 0xff892658 } +0x578ea3c4->Object::~Object { 0x578ea3c4, 0xff892658 } +0xff892658->Object::~Object { 0xff892658 } exception caught: printer end { } ./c++.at:1362: grep '^exception caught: printer$' stderr stdout: + exception caught: printer ./c++.at:1362: $PREPARSER ./input aaaae stderr: @@ -270027,81 +268579,137 @@ ./c++.at:1362: $PREPARSER ./input aaaaR stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -687. c++.at:1362: ok - -722. glr-regression.at:490: testing Improper merging of GLR delayed action sets: glr2.cc ... -./glr-regression.at:490: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.cc glr-regr3.y -./glr-regression.at:490: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr3 glr-regr3.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:1066: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +728. glr-regression.at:598: testing Duplicate representation of merged trees: api.value.type=union glr2.cc ... 
+./glr-regression.at:598: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y +./glr-regression.at:597: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS +./c++.at:1066: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +729. glr-regression.at:670: testing User destructor for unresolved GLR semantic value: glr.c ... +./glr-regression.at:670: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.c glr-regr5.y +======== Testing with C++ standard flags: '' +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./glr-regression.at:670: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr5 glr-regr5.c $LIBS +./glr-regression.at:598: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS +stderr: +stdout: +./c++.at:1066: ./check +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: +stdout: +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./glr-regression.at:592: $PREPARSER ./glr-regr4 +stderr: +./glr-regression.at:592: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +723. glr-regression.at:592: ok stderr: stdout: ./glr-regression.at:355: $PREPARSER ./glr-regr2a input1.txt + stderr: ./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./glr-regression.at:355: $PREPARSER ./glr-regr2a input2.txt stderr: ./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: ./glr-regression.at:355: $PREPARSER ./glr-regr2a input3.txt -./glr-regression.at:488: $PREPARSER ./glr-regr3 input.txt -stderr: stderr: ./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:488: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 718. glr-regression.at:355: ok -720. glr-regression.at:488: ok + +730. glr-regression.at:671: testing User destructor for unresolved GLR semantic value: glr.cc ... +./glr-regression.at:671: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y +731. glr-regression.at:672: testing User destructor for unresolved GLR semantic value: glr2.cc ... +./glr-regression.at:672: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y +./glr-regression.at:671: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS stderr: stdout: -./glr-regression.at:207: $PREPARSER ./glr-regr1 BPBPB - +./c++.at:574: $here/modern +stdout: +Modern C++: 202100 +./c++.at:574: $PREPARSER ./list stderr: - -./glr-regression.at:207: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -716. glr-regression.at:207: ok - -723. glr-regression.at:592: testing Duplicate representation of merged trees: %union { char *ptr; } glr.c ... -./glr-regression.at:592: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y -724. glr-regression.at:593: testing Duplicate representation of merged trees: %union { char *ptr; } glr.cc ... 
-./glr-regression.at:593: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y -725. glr-regression.at:594: testing Duplicate representation of merged trees: %union { char *ptr; } glr2.cc ... -./glr-regression.at:594: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y +./glr-regression.at:672: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:859: $PREPARSER ./input +./c++.at:851: $PREPARSER ./input stderr: -./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./glr-regression.at:592: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS -./glr-regression.at:593: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS -./glr-regression.at:594: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS stderr: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: +======== Testing with C++ standard flags: '' +674. c++.at:574: ok +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./glr-regression.at:489: $PREPARSER ./glr-regr3 input.txt stderr: ./glr-regression.at:489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + 721. glr-regression.at:489: ok -726. glr-regression.at:596: testing Duplicate representation of merged trees: api.value.type=union glr.c ... -./glr-regression.at:596: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y -./glr-regression.at:596: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS stderr: stdout: -./glr-regression.at:592: $PREPARSER ./glr-regr4 +./c++.at:1555: $PREPARSER ./test +732. glr-regression.at:738: testing User destructor after an error during a split parse: glr.c ... +./glr-regression.at:738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.c glr-regr6.y stderr: -./glr-regression.at:592: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -723. glr-regression.at:592: ok - -727. glr-regression.at:597: testing Duplicate representation of merged trees: api.value.type=union glr.cc ... -./glr-regression.at:597: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y -./glr-regression.at:597: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +733. glr-regression.at:739: testing User destructor after an error during a split parse: glr.cc ... 
+./glr-regression.at:739: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y +./glr-regression.at:738: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr6 glr-regr6.c $LIBS +./glr-regression.at:739: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS stderr: stdout: -./c++.at:859: $PREPARSER ./input +./c++.at:1555: ./check +./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:857: $PREPARSER ./input +stderr: +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./glr-regression.at:596: $PREPARSER ./glr-regr4 @@ -270109,371 +268717,305 @@ ./glr-regression.at:596: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 726. glr-regression.at:596: ok -728. glr-regression.at:598: testing Duplicate representation of merged trees: api.value.type=union glr2.cc ... -./glr-regression.at:598: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y -./glr-regression.at:598: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS stderr: stdout: -./glr-regression.at:356: $PREPARSER ./glr-regr2a input1.txt +./glr-regression.at:670: $PREPARSER ./glr-regr5 stderr: -./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:356: $PREPARSER ./glr-regr2a input2.txt +Ambiguity detected. +Option 1, + start -> + 'a' + +Option 2, + start -> + 'a' + +syntax is ambiguous +./glr-regression.at:670: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:356: $PREPARSER ./glr-regr2a input3.txt +stdout: +./c++.at:573: $here/modern +729. glr-regression.at:670: ok +stdout: +Modern C++: 202100 +./c++.at:573: $PREPARSER ./list + +734. glr-regression.at:740: testing User destructor after an error during a split parse: glr2.cc ... +./glr-regression.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y stderr: -./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -719. glr-regression.at:356: ok +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +673. c++.at:573: ok +./glr-regression.at:740: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS + +735. glr-regression.at:843: testing Duplicated user destructor for lookahead: glr.c ... 
+./glr-regression.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.c glr-regr7.y +./glr-regression.at:843: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr7 glr-regr7.c $LIBS +736. glr-regression.at:844: testing Duplicated user destructor for lookahead: glr.cc ... +./glr-regression.at:844: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.cc glr-regr7.y +./glr-regression.at:844: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr7 glr-regr7.cc $LIBS stderr: stdout: +./glr-regression.at:738: $PREPARSER ./glr-regr6 +stderr: +Ambiguity detected. +Option 1, + start -> + 'a' + +Option 2, + start -> + 'a' + +syntax is ambiguous +./glr-regression.at:738: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +732. glr-regression.at:738: ok +stderr: +stdout: ./glr-regression.at:593: $PREPARSER ./glr-regr4 stderr: ./glr-regression.at:593: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 724. glr-regression.at:593: ok -729. glr-regression.at:670: testing User destructor for unresolved GLR semantic value: glr.c ... -./glr-regression.at:670: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.c glr-regr5.y -./glr-regression.at:670: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr5 glr-regr5.c $LIBS -730. glr-regression.at:671: testing User destructor for unresolved GLR semantic value: glr.cc ... -./glr-regression.at:671: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y -./glr-regression.at:671: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS stderr: stdout: -./c++.at:1363: $PREPARSER ./input aaaas +./glr-regression.at:597: $PREPARSER ./glr-regr4 +stderr: +./glr-regression.at:597: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +727. glr-regression.at:597: ok + +737. glr-regression.at:845: testing Duplicated user destructor for lookahead: glr2.cc ... +./glr-regression.at:845: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.cc glr-regr7.y +738. glr-regression.at:944: testing Incorrectly initialized location for empty right-hand side in GLR: glr.c ... +./glr-regression.at:944: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.c glr-regr8.y +739. glr-regression.at:945: testing Incorrectly initialized location for empty right-hand side in GLR: glr.cc ... +./glr-regression.at:945: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.cc glr-regr8.y +./glr-regression.at:845: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr7 glr-regr7.cc $LIBS +./glr-regression.at:944: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr8 glr-regr8.c $LIBS +./glr-regression.at:945: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr8 glr-regr8.cc $LIBS +stderr: +stdout: +./glr-regression.at:207: $PREPARSER ./glr-regr1 BPBPB +stderr: +./glr-regression.at:207: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +716. glr-regression.at:207: ok + +stderr: +stdout: +./glr-regression.at:671: $PREPARSER ./glr-regr5 +stderr: +Ambiguity detected. 
+Option 1, + start -> + 'a' + +Option 2, + start -> + 'a' + +syntax is ambiguous +./glr-regression.at:671: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +740. glr-regression.at:946: testing Incorrectly initialized location for empty right-hand side in GLR: glr2.cc ... +./glr-regression.at:946: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.cc glr-regr8.y +730. glr-regression.at:671: stderr: + ok +stdout: +./glr-regression.at:843: $PREPARSER ./glr-regr7 +stderr: +memory exhausted + +./glr-regression.at:843: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +735. glr-regression.at:843: ok + +stderr: +stdout: +./c++.at:851: $PREPARSER ./input +stderr: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:946: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr8 glr-regr8.cc $LIBS +stderr: +stdout: +./c++.at:1361: $PREPARSER ./input aaaas stderr: exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaal +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaal stderr: exception caught: yylex -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input i +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +741. glr-regression.at:1036: testing No users destructors if stack 0 deleted: glr.c ... +./glr-regression.at:1036: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.c glr-regr9.y +./c++.at:1361: $PREPARSER ./input i stderr: exception caught: initial-action -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaap +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaap stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input --debug aaaap +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input --debug aaaap stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xff8c453f->Object::Object { } -0xff8c45e8->Object::Object { 0xff8c453f } -0xff8c453f->Object::~Object { 0xff8c453f, 0xff8c45e8 } -Next token is token 'a' (0xff8c45e8 'a') -0xff8c4538->Object::Object { 0xff8c45e8 } -0xff8c45e8->Object::~Object { 0xff8c4538, 0xff8c45e8 } -Shifting token 'a' (0xff8c4538 'a') -0x575b23c4->Object::Object { 0xff8c4538 } -0xff8c4538->Object::~Object { 0x575b23c4, 0xff8c4538 } +0x57087a00->Object::Object { } +Next token is token 'a' (0x57087a00 'a') +Shifting token 'a' (0x57087a00 'a') Entering state 1 Stack now 0 1 -0xff8c45f8->Object::Object { 0x575b23c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x575b23c4 'a') --> $$ = nterm item (0xff8c45f8 'a') -0x575b23c4->Object::~Object { 0x575b23c4, 0xff8c45f8 } -0x575b23c4->Object::Object { 0xff8c45f8 } -0xff8c45f8->Object::~Object { 0x575b23c4, 0xff8c45f8 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57087a00 'a') +-> $$ = nterm item (0x57087a00 'a') Entering state 
10 Stack now 0 10 Reading a token -0xff8c453f->Object::Object { 0x575b23c4 } -0xff8c45e8->Object::Object { 0x575b23c4, 0xff8c453f } -0xff8c453f->Object::~Object { 0x575b23c4, 0xff8c453f, 0xff8c45e8 } -Next token is token 'a' (0xff8c45e8 'a') -0xff8c4538->Object::Object { 0x575b23c4, 0xff8c45e8 } -0xff8c45e8->Object::~Object { 0x575b23c4, 0xff8c4538, 0xff8c45e8 } -Shifting token 'a' (0xff8c4538 'a') -0x575b23d4->Object::Object { 0x575b23c4, 0xff8c4538 } -0xff8c4538->Object::~Object { 0x575b23c4, 0x575b23d4, 0xff8c4538 } +0x57087a30->Object::Object { 0x57087a00 } +Next token is token 'a' (0x57087a30 'a') +Shifting token 'a' (0x57087a30 'a') Entering state 1 Stack now 0 10 1 -0xff8c45f8->Object::Object { 0x575b23c4, 0x575b23d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x575b23d4 'a') --> $$ = nterm item (0xff8c45f8 'a') -0x575b23d4->Object::~Object { 0x575b23c4, 0x575b23d4, 0xff8c45f8 } -0x575b23d4->Object::Object { 0x575b23c4, 0xff8c45f8 } -0xff8c45f8->Object::~Object { 0x575b23c4, 0x575b23d4, 0xff8c45f8 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57087a30 'a') +-> $$ = nterm item (0x57087a30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0xff8c453f->Object::Object { 0x575b23c4, 0x575b23d4 } -0xff8c45e8->Object::Object { 0x575b23c4, 0x575b23d4, 0xff8c453f } -0xff8c453f->Object::~Object { 0x575b23c4, 0x575b23d4, 0xff8c453f, 0xff8c45e8 } -Next token is token 'a' (0xff8c45e8 'a') -0xff8c4538->Object::Object { 0x575b23c4, 0x575b23d4, 0xff8c45e8 } -0xff8c45e8->Object::~Object { 0x575b23c4, 0x575b23d4, 0xff8c4538, 0xff8c45e8 } -Shifting token 'a' (0xff8c4538 'a') -0x575b23e4->Object::Object { 0x575b23c4, 0x575b23d4, 0xff8c4538 } -0xff8c4538->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c4538 } +0x57087a60->Object::Object { 0x57087a00, 0x57087a30 } +Next token is token 'a' (0x57087a60 'a') +Shifting token 'a' (0x57087a60 'a') Entering state 1 Stack now 0 10 10 1 -0xff8c45f8->Object::Object { 0x575b23c4, 0x575b23d4, 0x575b23e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x575b23e4 'a') --> $$ = nterm item (0xff8c45f8 'a') -0x575b23e4->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c45f8 } -0x575b23e4->Object::Object { 0x575b23c4, 0x575b23d4, 0xff8c45f8 } -0xff8c45f8->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c45f8 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57087a60 'a') +-> $$ = nterm item (0x57087a60 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0xff8c453f->Object::Object { 0x575b23c4, 0x575b23d4, 0x575b23e4 } -0xff8c45e8->Object::Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c453f } -0xff8c453f->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c453f, 0xff8c45e8 } -Next token is token 'a' (0xff8c45e8 'a') -0xff8c4538->Object::Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c45e8 } -0xff8c45e8->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c4538, 0xff8c45e8 } -Shifting token 'a' (0xff8c4538 'a') -0x575b23f4->Object::Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c4538 } -0xff8c4538->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0x575b23f4, 0xff8c4538 } +0x57087a90->Object::Object { 0x57087a00, 0x57087a30, 0x57087a60 } +Next token is token 'a' (0x57087a90 'a') +Shifting token 'a' (0x57087a90 'a') Entering state 1 Stack now 0 10 10 10 1 -0xff8c45f8->Object::Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0x575b23f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x575b23f4 'a') --> $$ = nterm item (0xff8c45f8 'a') 
-0x575b23f4->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0x575b23f4, 0xff8c45f8 } -0x575b23f4->Object::Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c45f8 } -0xff8c45f8->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0x575b23f4, 0xff8c45f8 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57087a90 'a') +-> $$ = nterm item (0x57087a90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0xff8c453f->Object::Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0x575b23f4 } -0xff8c45e8->Object::Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0x575b23f4, 0xff8c453f } -0xff8c453f->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0x575b23f4, 0xff8c453f, 0xff8c45e8 } -Next token is token 'p' (0xff8c45e8 'p'Exception caught: cleaning lookahead and stack -0x575b23f4->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0x575b23f4, 0xff8c45e8 } -0x575b23e4->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c45e8 } -0x575b23d4->Object::~Object { 0x575b23c4, 0x575b23d4, 0xff8c45e8 } -0x575b23c4->Object::~Object { 0x575b23c4, 0xff8c45e8 } -0xff8c45e8->Object::~Object { 0xff8c45e8 } +0x57087ac0->Object::Object { 0x57087a00, 0x57087a30, 0x57087a60, 0x57087a90 } +Next token is token 'p' (0x57087ac0 'p'Exception caught: cleaning lookahead and stack +0x57087ac0->Object::~Object { 0x57087a00, 0x57087a30, 0x57087a60, 0x57087a90, 0x57087ac0 } +0x57087a90->Object::~Object { 0x57087a00, 0x57087a30, 0x57087a60, 0x57087a90 } +0x57087a60->Object::~Object { 0x57087a00, 0x57087a30, 0x57087a60 } +0x57087a30->Object::~Object { 0x57087a00, 0x57087a30 } +0x57087a00->Object::~Object { 0x57087a00 } exception caught: printer end { } -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +742. glr-regression.at:1037: testing No users destructors if stack 0 deleted: glr.cc ... 
stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0xff8c453f->Object::Object { } -0xff8c45e8->Object::Object { 0xff8c453f } -0xff8c453f->Object::~Object { 0xff8c453f, 0xff8c45e8 } -Next token is token 'a' (0xff8c45e8 'a') -0xff8c4538->Object::Object { 0xff8c45e8 } -0xff8c45e8->Object::~Object { 0xff8c4538, 0xff8c45e8 } -Shifting token 'a' (0xff8c4538 'a') -0x575b23c4->Object::Object { 0xff8c4538 } -0xff8c4538->Object::~Object { 0x575b23c4, 0xff8c4538 } +0x57087a00->Object::Object { } +Next token is token 'a' (0x57087a00 'a') +Shifting token 'a' (0x57087a00 'a') Entering state 1 Stack now 0 1 -0xff8c45f8->Object::Object { 0x575b23c4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x575b23c4 'a') --> $$ = nterm item (0xff8c45f8 'a') -0x575b23c4->Object::~Object { 0x575b23c4, 0xff8c45f8 } -0x575b23c4->Object::Object { 0xff8c45f8 } -0xff8c45f8->Object::~Object { 0x575b23c4, 0xff8c45f8 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57087a00 'a') +-> $$ = nterm item (0x57087a00 'a') Entering state 10 Stack now 0 10 Reading a token -0xff8c453f->Object::Object { 0x575b23c4 } -0xff8c45e8->Object::Object { 0x575b23c4, 0xff8c453f } -0xff8c453f->Object::~Object { 0x575b23c4, 0xff8c453f, 0xff8c45e8 } -Next token is token 'a' (0xff8c45e8 'a') -0xff8c4538->Object::Object { 0x575b23c4, 0xff8c45e8 } -0xff8c45e8->Object::~Object { 0x575b23c4, 0xff8c4538, 0xff8c45e8 } -Shifting token 'a' (0xff8c4538 'a') -0x575b23d4->Object::Object { 0x575b23c4, 0xff8c4538 } -0xff8c4538->Object::~Object { 0x575b23c4, 0x575b23d4, 0xff8c4538 } +0x57087a30->Object::Object { 0x57087a00 } +Next token is token 'a' (0x57087a30 'a') +Shifting token 'a' (0x57087a30 'a') Entering state 1 Stack now 0 10 1 -0xff8c45f8->Object::Object { 0x575b23c4, 0x575b23d4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x575b23d4 'a') --> $$ = nterm item (0xff8c45f8 'a') -0x575b23d4->Object::~Object { 0x575b23c4, 0x575b23d4, 0xff8c45f8 } -0x575b23d4->Object::Object { 0x575b23c4, 0xff8c45f8 } -0xff8c45f8->Object::~Object { 0x575b23c4, 0x575b23d4, 0xff8c45f8 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57087a30 'a') +-> $$ = nterm item (0x57087a30 'a') Entering state 10 Stack now 0 10 10 Reading a token -0xff8c453f->Object::Object { 0x575b23c4, 0x575b23d4 } -0xff8c45e8->Object::Object { 0x575b23c4, 0x575b23d4, 0xff8c453f } -0xff8c453f->Object::~Object { 0x575b23c4, 0x575b23d4, 0xff8c453f, 0xff8c45e8 } -Next token is token 'a' (0xff8c45e8 'a') -0xff8c4538->Object::Object { 0x575b23c4, 0x575b23d4, 0xff8c45e8 } -0xff8c45e8->Object::~Object { 0x575b23c4, 0x575b23d4, 0xff8c4538, 0xff8c45e8 } -Shifting token 'a' (0xff8c4538 'a') -0x575b23e4->Object::Object { 0x575b23c4, 0x575b23d4, 0xff8c4538 } -0xff8c4538->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c4538 } +0x57087a60->Object::Object { 0x57087a00, 0x57087a30 } +Next token is token 'a' (0x57087a60 'a') +Shifting token 'a' (0x57087a60 'a') Entering state 1 Stack now 0 10 10 1 -0xff8c45f8->Object::Object { 0x575b23c4, 0x575b23d4, 0x575b23e4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x575b23e4 'a') --> $$ = nterm item (0xff8c45f8 'a') -0x575b23e4->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c45f8 } -0x575b23e4->Object::Object { 0x575b23c4, 0x575b23d4, 0xff8c45f8 } -0xff8c45f8->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c45f8 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57087a60 'a') +-> $$ = nterm item (0x57087a60 'a') Entering state 10 Stack now 0 10 10 10 Reading a 
token -0xff8c453f->Object::Object { 0x575b23c4, 0x575b23d4, 0x575b23e4 } -0xff8c45e8->Object::Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c453f } -0xff8c453f->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c453f, 0xff8c45e8 } -Next token is token 'a' (0xff8c45e8 'a') -0xff8c4538->Object::Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c45e8 } -0xff8c45e8->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c4538, 0xff8c45e8 } -Shifting token 'a' (0xff8c4538 'a') -0x575b23f4->Object::Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c4538 } -0xff8c4538->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0x575b23f4, 0xff8c4538 } +0x57087a90->Object::Object { 0x57087a00, 0x57087a30, 0x57087a60 } +Next token is token 'a' (0x57087a90 'a') +Shifting token 'a' (0x57087a90 'a') Entering state 1 Stack now 0 10 10 10 1 -0xff8c45f8->Object::Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0x575b23f4 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x575b23f4 'a') --> $$ = nterm item (0xff8c45f8 'a') -0x575b23f4->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0x575b23f4, 0xff8c45f8 } -0x575b23f4->Object::Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c45f8 } -0xff8c45f8->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0x575b23f4, 0xff8c45f8 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x57087a90 'a') +-> $$ = nterm item (0x57087a90 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0xff8c453f->Object::Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0x575b23f4 } -0xff8c45e8->Object::Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0x575b23f4, 0xff8c453f } -0xff8c453f->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0x575b23f4, 0xff8c453f, 0xff8c45e8 } -Next token is token 'p' (0xff8c45e8 'p'Exception caught: cleaning lookahead and stack -0x575b23f4->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0x575b23f4, 0xff8c45e8 } -0x575b23e4->Object::~Object { 0x575b23c4, 0x575b23d4, 0x575b23e4, 0xff8c45e8 } -0x575b23d4->Object::~Object { 0x575b23c4, 0x575b23d4, 0xff8c45e8 } -0x575b23c4->Object::~Object { 0x575b23c4, 0xff8c45e8 } -0xff8c45e8->Object::~Object { 0xff8c45e8 } +0x57087ac0->Object::Object { 0x57087a00, 0x57087a30, 0x57087a60, 0x57087a90 } +Next token is token 'p' (0x57087ac0 'p'Exception caught: cleaning lookahead and stack +0x57087ac0->Object::~Object { 0x57087a00, 0x57087a30, 0x57087a60, 0x57087a90, 0x57087ac0 } +0x57087a90->Object::~Object { 0x57087a00, 0x57087a30, 0x57087a60, 0x57087a90 } +0x57087a60->Object::~Object { 0x57087a00, 0x57087a30, 0x57087a60 } +0x57087a30->Object::~Object { 0x57087a00, 0x57087a30 } +0x57087a00->Object::~Object { 0x57087a00 } exception caught: printer end { } -./c++.at:1363: grep '^exception caught: printer$' stderr +./c++.at:1361: grep '^exception caught: printer$' stderr +./glr-regression.at:1037: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.cc glr-regr9.y stdout: +./glr-regression.at:1036: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr9 glr-regr9.c $LIBS exception caught: printer -./c++.at:1363: $PREPARSER ./input aaaae +./c++.at:1361: $PREPARSER ./input aaaae stderr: exception caught: syntax error -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaE +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaE stderr: exception caught: syntax error, unexpected end of file, 
expecting 'a' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaT -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaR -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./glr-regression.at:490: $PREPARSER ./glr-regr3 input.txt -stderr: -./glr-regression.at:490: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -722. glr-regression.at:490: ok - -731. glr-regression.at:672: testing User destructor for unresolved GLR semantic value: glr2.cc ... -./glr-regression.at:672: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y -./glr-regression.at:672: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS -stderr: -stdout: -./glr-regression.at:597: $PREPARSER ./glr-regr4 -stderr: -./glr-regression.at:597: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -727. glr-regression.at:597: ok - -732. glr-regression.at:738: testing User destructor after an error during a split parse: glr.c ... -./glr-regression.at:738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.c glr-regr6.y -stderr: -stdout: -./glr-regression.at:738: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr6 glr-regr6.c $LIBS -./glr-regression.at:670: $PREPARSER ./glr-regr5 -stderr: -Ambiguity detected. -Option 1, - start -> - 'a' - -Option 2, - start -> - 'a' - -syntax is ambiguous -./glr-regression.at:670: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -729. glr-regression.at:670: ok - -733. glr-regression.at:739: testing User destructor after an error during a split parse: glr.cc ... -./glr-regression.at:739: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y -./glr-regression.at:739: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaT stderr: -stdout: -./c++.at:859: $PREPARSER ./input +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaR stderr: -./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:860: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./glr-regression.at:738: $PREPARSER ./glr-regr6 -stderr: -Ambiguity detected. -Option 1, - start -> - 'a' - -Option 2, - start -> - 'a' - -syntax is ambiguous -./glr-regression.at:738: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -732. glr-regression.at:738: ok -stderr: -stdout: -./glr-regression.at:594: $PREPARSER ./glr-regr4 -stderr: - -./glr-regression.at:594: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -725. glr-regression.at:594: ok - -734. 
glr-regression.at:740: testing User destructor after an error during a split parse: glr2.cc ... -./glr-regression.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y -stderr: -stdout: -./glr-regression.at:671: $PREPARSER ./glr-regr5 -stderr: -Ambiguity detected. -Option 1, - start -> - 'a' - -Option 2, - start -> - 'a' - -syntax is ambiguous -./glr-regression.at:671: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -735. glr-regression.at:843: testing Duplicated user destructor for lookahead: glr.c ... -./glr-regression.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.c glr-regr7.y -730. glr-regression.at:671: ok - -./glr-regression.at:740: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS -./glr-regression.at:843: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr7 glr-regr7.c $LIBS -736. glr-regression.at:844: testing Duplicated user destructor for lookahead: glr.cc ... -./glr-regression.at:844: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.cc glr-regr7.y -./glr-regression.at:844: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr7 glr-regr7.cc $LIBS +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:1037: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr9 glr-regr9.cc $LIBS stderr: stdout: ./glr-regression.at:739: $PREPARSER ./glr-regr6 @@ -270490,39 +269032,18 @@ syntax is ambiguous ./glr-regression.at:739: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 733. glr-regression.at:739: ok - -737. glr-regression.at:845: testing Duplicated user destructor for lookahead: glr2.cc ... -./glr-regression.at:845: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.cc glr-regr7.y -./glr-regression.at:845: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr7 glr-regr7.cc $LIBS -stderr: -stdout: -./c++.at:860: $PREPARSER ./input -stderr: -./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./glr-regression.at:843: $PREPARSER ./glr-regr7 -stderr: -memory exhausted -./glr-regression.at:843: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -735. glr-regression.at:843: ok - -738. glr-regression.at:944: testing Incorrectly initialized location for empty right-hand side in GLR: glr.c ... -./glr-regression.at:944: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.c glr-regr8.y -./glr-regression.at:944: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr8 glr-regr8.c $LIBS stderr: stdout: -./glr-regression.at:598: $PREPARSER ./glr-regr4 +./c++.at:857: $PREPARSER ./input stderr: -./glr-regression.at:598: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -728. 
glr-regression.at:598: ok - stderr: stdout: +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input aaaas + +======== Testing with C++ standard flags: '' stderr: +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS exception caught: reduction ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input aaaal @@ -270533,8 +269054,6 @@ stderr: exception caught: initial-action ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -739. glr-regression.at:945: testing Incorrectly initialized location for empty right-hand side in GLR: glr.cc ... -./glr-regression.at:945: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.cc glr-regr8.y ./c++.at:1363: $PREPARSER ./input aaaap stderr: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -270544,567 +269063,905 @@ Entering state 0 Stack now 0 Reading a token -0xffd2d6ff->Object::Object { } -0xffd2d7a8->Object::Object { 0xffd2d6ff } -0xffd2d6ff->Object::~Object { 0xffd2d6ff, 0xffd2d7a8 } -Next token is token 'a' (0xffd2d7a8 'a') -0xffd2d6f8->Object::Object { 0xffd2d7a8 } -0xffd2d7a8->Object::~Object { 0xffd2d6f8, 0xffd2d7a8 } -Shifting token 'a' (0xffd2d6f8 'a') -0x57d103c4->Object::Object { 0xffd2d6f8 } -0xffd2d6f8->Object::~Object { 0x57d103c4, 0xffd2d6f8 } +0xffae62df->Object::Object { } +0xffae6388->Object::Object { 0xffae62df } +0xffae62df->Object::~Object { 0xffae62df, 0xffae6388 } +Next token is token 'a' (0xffae6388 'a') +0xffae62d8->Object::Object { 0xffae6388 } +0xffae6388->Object::~Object { 0xffae62d8, 0xffae6388 } +Shifting token 'a' (0xffae62d8 'a') +0x580893c4->Object::Object { 0xffae62d8 } +0xffae62d8->Object::~Object { 0x580893c4, 0xffae62d8 } Entering state 1 Stack now 0 1 -0xffd2d7b8->Object::Object { 0x57d103c4 } +0xffae6398->Object::Object { 0x580893c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57d103c4 'a') --> $$ = nterm item (0xffd2d7b8 'a') -0x57d103c4->Object::~Object { 0x57d103c4, 0xffd2d7b8 } -0x57d103c4->Object::Object { 0xffd2d7b8 } -0xffd2d7b8->Object::~Object { 0x57d103c4, 0xffd2d7b8 } + $1 = token 'a' (0x580893c4 'a') +-> $$ = nterm item (0xffae6398 'a') +0x580893c4->Object::~Object { 0x580893c4, 0xffae6398 } +0x580893c4->Object::Object { 0xffae6398 } +0xffae6398->Object::~Object { 0x580893c4, 0xffae6398 } Entering state 10 Stack now 0 10 Reading a token -0xffd2d6ff->Object::Object { 0x57d103c4 } -0xffd2d7a8->Object::Object { 0x57d103c4, 0xffd2d6ff } -0xffd2d6ff->Object::~Object { 0x57d103c4, 0xffd2d6ff, 0xffd2d7a8 } -Next token is token 'a' (0xffd2d7a8 'a') -0xffd2d6f8->Object::Object { 0x57d103c4, 0xffd2d7a8 } -0xffd2d7a8->Object::~Object { 0x57d103c4, 0xffd2d6f8, 0xffd2d7a8 } -Shifting token 'a' (0xffd2d6f8 'a') -0x57d103d4->Object::Object { 0x57d103c4, 0xffd2d6f8 } -0xffd2d6f8->Object::~Object { 0x57d103c4, 0x57d103d4, 0xffd2d6f8 } +0xffae62df->Object::Object { 0x580893c4 } +0xffae6388->Object::Object { 0x580893c4, 0xffae62df } +0xffae62df->Object::~Object { 0x580893c4, 0xffae62df, 0xffae6388 } +Next token is token 'a' (0xffae6388 'a') +0xffae62d8->Object::Object { 0x580893c4, 0xffae6388 } +0xffae6388->Object::~Object { 0x580893c4, 0xffae62d8, 0xffae6388 } +Shifting token 'a' (0xffae62d8 'a') +0x580893d4->Object::Object { 0x580893c4, 0xffae62d8 } +0xffae62d8->Object::~Object { 0x580893c4, 0x580893d4, 0xffae62d8 } Entering state 1 Stack 
now 0 10 1 -0xffd2d7b8->Object::Object { 0x57d103c4, 0x57d103d4 } +0xffae6398->Object::Object { 0x580893c4, 0x580893d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57d103d4 'a') --> $$ = nterm item (0xffd2d7b8 'a') -0x57d103d4->Object::~Object { 0x57d103c4, 0x57d103d4, 0xffd2d7b8 } -0x57d103d4->Object::Object { 0x57d103c4, 0xffd2d7b8 } -0xffd2d7b8->Object::~Object { 0x57d103c4, 0x57d103d4, 0xffd2d7b8 } + $1 = token 'a' (0x580893d4 'a') +-> $$ = nterm item (0xffae6398 'a') +0x580893d4->Object::~Object { 0x580893c4, 0x580893d4, 0xffae6398 } +0x580893d4->Object::Object { 0x580893c4, 0xffae6398 } +0xffae6398->Object::~Object { 0x580893c4, 0x580893d4, 0xffae6398 } Entering state 10 Stack now 0 10 10 Reading a token -0xffd2d6ff->Object::Object { 0x57d103c4, 0x57d103d4 } -0xffd2d7a8->Object::Object { 0x57d103c4, 0x57d103d4, 0xffd2d6ff } -0xffd2d6ff->Object::~Object { 0x57d103c4, 0x57d103d4, 0xffd2d6ff, 0xffd2d7a8 } -Next token is token 'a' (0xffd2d7a8 'a') -0xffd2d6f8->Object::Object { 0x57d103c4, 0x57d103d4, 0xffd2d7a8 } -0xffd2d7a8->Object::~Object { 0x57d103c4, 0x57d103d4, 0xffd2d6f8, 0xffd2d7a8 } -Shifting token 'a' (0xffd2d6f8 'a') -0x57d103e4->Object::Object { 0x57d103c4, 0x57d103d4, 0xffd2d6f8 } -0xffd2d6f8->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d6f8 } +0xffae62df->Object::Object { 0x580893c4, 0x580893d4 } +0xffae6388->Object::Object { 0x580893c4, 0x580893d4, 0xffae62df } +0xffae62df->Object::~Object { 0x580893c4, 0x580893d4, 0xffae62df, 0xffae6388 } +Next token is token 'a' (0xffae6388 'a') +0xffae62d8->Object::Object { 0x580893c4, 0x580893d4, 0xffae6388 } +0xffae6388->Object::~Object { 0x580893c4, 0x580893d4, 0xffae62d8, 0xffae6388 } +Shifting token 'a' (0xffae62d8 'a') +0x580893e4->Object::Object { 0x580893c4, 0x580893d4, 0xffae62d8 } +0xffae62d8->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae62d8 } Entering state 1 Stack now 0 10 10 1 -0xffd2d7b8->Object::Object { 0x57d103c4, 0x57d103d4, 0x57d103e4 } +0xffae6398->Object::Object { 0x580893c4, 0x580893d4, 0x580893e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57d103e4 'a') --> $$ = nterm item (0xffd2d7b8 'a') -0x57d103e4->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d7b8 } -0x57d103e4->Object::Object { 0x57d103c4, 0x57d103d4, 0xffd2d7b8 } -0xffd2d7b8->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d7b8 } + $1 = token 'a' (0x580893e4 'a') +-> $$ = nterm item (0xffae6398 'a') +0x580893e4->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae6398 } +0x580893e4->Object::Object { 0x580893c4, 0x580893d4, 0xffae6398 } +0xffae6398->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae6398 } Entering state 10 Stack now 0 10 10 10 Reading a token -0xffd2d6ff->Object::Object { 0x57d103c4, 0x57d103d4, 0x57d103e4 } -0xffd2d7a8->Object::Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d6ff } -0xffd2d6ff->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d6ff, 0xffd2d7a8 } -Next token is token 'a' (0xffd2d7a8 'a') -0xffd2d6f8->Object::Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d7a8 } -0xffd2d7a8->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d6f8, 0xffd2d7a8 } -Shifting token 'a' (0xffd2d6f8 'a') -0x57d103f4->Object::Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d6f8 } -0xffd2d6f8->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0x57d103f4, 0xffd2d6f8 } +0xffae62df->Object::Object { 0x580893c4, 0x580893d4, 0x580893e4 } +0xffae6388->Object::Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae62df } 
+0xffae62df->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae62df, 0xffae6388 } +Next token is token 'a' (0xffae6388 'a') +0xffae62d8->Object::Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae6388 } +0xffae6388->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae62d8, 0xffae6388 } +Shifting token 'a' (0xffae62d8 'a') +0x580893f4->Object::Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae62d8 } +0xffae62d8->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0x580893f4, 0xffae62d8 } Entering state 1 Stack now 0 10 10 10 1 -0xffd2d7b8->Object::Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0x57d103f4 } +0xffae6398->Object::Object { 0x580893c4, 0x580893d4, 0x580893e4, 0x580893f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57d103f4 'a') --> $$ = nterm item (0xffd2d7b8 'a') -0x57d103f4->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0x57d103f4, 0xffd2d7b8 } -0x57d103f4->Object::Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d7b8 } -0xffd2d7b8->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0x57d103f4, 0xffd2d7b8 } + $1 = token 'a' (0x580893f4 'a') +-> $$ = nterm item (0xffae6398 'a') +0x580893f4->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0x580893f4, 0xffae6398 } +0x580893f4->Object::Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae6398 } +0xffae6398->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0x580893f4, 0xffae6398 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0xffd2d6ff->Object::Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0x57d103f4 } -0xffd2d7a8->Object::Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0x57d103f4, 0xffd2d6ff } -0xffd2d6ff->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0x57d103f4, 0xffd2d6ff, 0xffd2d7a8 } -Next token is token 'p' (0xffd2d7a8 'p'Exception caught: cleaning lookahead and stack -0x57d103f4->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0x57d103f4, 0xffd2d7a8 } -0x57d103e4->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d7a8 } -0x57d103d4->Object::~Object { 0x57d103c4, 0x57d103d4, 0xffd2d7a8 } -0x57d103c4->Object::~Object { 0x57d103c4, 0xffd2d7a8 } -0xffd2d7a8->Object::~Object { 0xffd2d7a8 } +0xffae62df->Object::Object { 0x580893c4, 0x580893d4, 0x580893e4, 0x580893f4 } +0xffae6388->Object::Object { 0x580893c4, 0x580893d4, 0x580893e4, 0x580893f4, 0xffae62df } +0xffae62df->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0x580893f4, 0xffae62df, 0xffae6388 } +Next token is token 'p' (0xffae6388 'p'Exception caught: cleaning lookahead and stack +0x580893f4->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0x580893f4, 0xffae6388 } +0x580893e4->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae6388 } +0x580893d4->Object::~Object { 0x580893c4, 0x580893d4, 0xffae6388 } +0x580893c4->Object::~Object { 0x580893c4, 0xffae6388 } +0xffae6388->Object::~Object { 0xffae6388 } exception caught: printer end { } ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stdout: +stderr: +./glr-regression.at:356: $PREPARSER ./glr-regr2a input1.txt Starting parse Entering state 0 Stack now 0 Reading a token -0xffd2d6ff->Object::Object { } -0xffd2d7a8->Object::Object { 0xffd2d6ff } -0xffd2d6ff->Object::~Object { 0xffd2d6ff, 0xffd2d7a8 } -Next token is token 'a' (0xffd2d7a8 'a') -0xffd2d6f8->Object::Object { 0xffd2d7a8 } -0xffd2d7a8->Object::~Object { 0xffd2d6f8, 0xffd2d7a8 } -Shifting token 'a' (0xffd2d6f8 'a') -0x57d103c4->Object::Object { 0xffd2d6f8 } -0xffd2d6f8->Object::~Object { 0x57d103c4, 0xffd2d6f8 } 
+0xffae62df->Object::Object { } +0xffae6388->Object::Object { 0xffae62df } +0xffae62df->Object::~Object { 0xffae62df, 0xffae6388 } +Next token is token 'a' (0xffae6388 'a') +0xffae62d8->Object::Object { 0xffae6388 } +0xffae6388->Object::~Object { 0xffae62d8, 0xffae6388 } +Shifting token 'a' (0xffae62d8 'a') +0x580893c4->Object::Object { 0xffae62d8 } +0xffae62d8->Object::~Object { 0x580893c4, 0xffae62d8 } Entering state 1 Stack now 0 1 -0xffd2d7b8->Object::Object { 0x57d103c4 } +0xffae6398->Object::Object { 0x580893c4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57d103c4 'a') --> $$ = nterm item (0xffd2d7b8 'a') -0x57d103c4->Object::~Object { 0x57d103c4, 0xffd2d7b8 } -0x57d103c4->Object::Object { 0xffd2d7b8 } -0xffd2d7b8->Object::~Object { 0x57d103c4, 0xffd2d7b8 } + $1 = token 'a' (0x580893c4 'a') +-> $$ = nterm item (0xffae6398 'a') +0x580893c4->Object::~Object { 0x580893c4, 0xffae6398 } +0x580893c4->Object::Object { 0xffae6398 } +0xffae6398->Object::~Object { 0x580893c4, 0xffae6398 } Entering state 10 Stack now 0 10 Reading a token -0xffd2d6ff->Object::Object { 0x57d103c4 } -0xffd2d7a8->Object::Object { 0x57d103c4, 0xffd2d6ff } -0xffd2d6ff->Object::~Object { 0x57d103c4, 0xffd2d6ff, 0xffd2d7a8 } -Next token is token 'a' (0xffd2d7a8 'a') -0xffd2d6f8->Object::Object { 0x57d103c4, 0xffd2d7a8 } -0xffd2d7a8->Object::~Object { 0x57d103c4, 0xffd2d6f8, 0xffd2d7a8 } -Shifting token 'a' (0xffd2d6f8 'a') -0x57d103d4->Object::Object { 0x57d103c4, 0xffd2d6f8 } -0xffd2d6f8->Object::~Object { 0x57d103c4, 0x57d103d4, 0xffd2d6f8 } +0xffae62df->Object::Object { 0x580893c4 } +0xffae6388->Object::Object { 0x580893c4, 0xffae62df } +0xffae62df->Object::~Object { 0x580893c4, 0xffae62df, 0xffae6388 } +Next token is token 'a' (0xffae6388 'a') +0xffae62d8->Object::Object { 0x580893c4, 0xffae6388 } +0xffae6388->Object::~Object { 0x580893c4, 0xffae62d8, 0xffae6388 } +Shifting token 'a' (0xffae62d8 'a') +0x580893d4->Object::Object { 0x580893c4, 0xffae62d8 } +0xffae62d8->Object::~Object { 0x580893c4, 0x580893d4, 0xffae62d8 } Entering state 1 Stack now 0 10 1 -0xffd2d7b8->Object::Object { 0x57d103c4, 0x57d103d4 } +0xffae6398->Object::Object { 0x580893c4, 0x580893d4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57d103d4 'a') --> $$ = nterm item (0xffd2d7b8 'a') -0x57d103d4->Object::~Object { 0x57d103c4, 0x57d103d4, 0xffd2d7b8 } -0x57d103d4->Object::Object { 0x57d103c4, 0xffd2d7b8 } -0xffd2d7b8->Object::~Object { 0x57d103c4, 0x57d103d4, 0xffd2d7b8 } + $1 = token 'a' (0x580893d4 'a') +-> $$ = nterm item (0xffae6398 'a') +0x580893d4->Object::~Object { 0x580893c4, 0x580893d4, 0xffae6398 } +0x580893d4->Object::Object { 0x580893c4, 0xffae6398 } +0xffae6398->Object::~Object { 0x580893c4, 0x580893d4, 0xffae6398 } Entering state 10 Stack now 0 10 10 Reading a token -0xffd2d6ff->Object::Object { 0x57d103c4, 0x57d103d4 } -0xffd2d7a8->Object::Object { 0x57d103c4, 0x57d103d4, 0xffd2d6ff } -0xffd2d6ff->Object::~Object { 0x57d103c4, 0x57d103d4, 0xffd2d6ff, 0xffd2d7a8 } -Next token is token 'a' (0xffd2d7a8 'a') -0xffd2d6f8->Object::Object { 0x57d103c4, 0x57d103d4, 0xffd2d7a8 } -0xffd2d7a8->Object::~Object { 0x57d103c4, 0x57d103d4, 0xffd2d6f8, 0xffd2d7a8 } -Shifting token 'a' (0xffd2d6f8 'a') -0x57d103e4->Object::Object { 0x57d103c4, 0x57d103d4, 0xffd2d6f8 } -0xffd2d6f8->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d6f8 } +0xffae62df->Object::Object { 0x580893c4, 0x580893d4 } +0xffae6388->Object::Object { 0x580893c4, 0x580893d4, 0xffae62df } +0xffae62df->Object::~Object { 0x580893c4, 0x580893d4, 
0xffae62df, 0xffae6388 } +Next token is token 'a' (0xffae6388 'a') +0xffae62d8->Object::Object { 0x580893c4, 0x580893d4, 0xffae6388 } +0xffae6388->Object::~Object { 0x580893c4, 0x580893d4, 0xffae62d8, 0xffae6388 } +Shifting token 'a' (0xffae62d8 'a') +0x580893e4->Object::Object { 0x580893c4, 0x580893d4, 0xffae62d8 } +0xffae62d8->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae62d8 } Entering state 1 Stack now 0 10 10 1 -0xffd2d7b8->Object::Object { 0x57d103c4, 0x57d103d4, 0x57d103e4 } +0xffae6398->Object::Object { 0x580893c4, 0x580893d4, 0x580893e4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57d103e4 'a') --> $$ = nterm item (0xffd2d7b8 'a') -0x57d103e4->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d7b8 } -0x57d103e4->Object::Object { 0x57d103c4, 0x57d103d4, 0xffd2d7b8 } -0xffd2d7b8->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d7b8 } + $1 = token 'a' (0x580893e4 'a') +-> $$ = nterm item (0xffae6398 'a') +0x580893e4->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae6398 } +0x580893e4->Object::Object { 0x580893c4, 0x580893d4, 0xffae6398 } +0xffae6398->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae6398 } Entering state 10 Stack now 0 10 10 10 Reading a token -0xffd2d6ff->Object::Object { 0x57d103c4, 0x57d103d4, 0x57d103e4 } -0xffd2d7a8->Object::Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d6ff } -0xffd2d6ff->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d6ff, 0xffd2d7a8 } -Next token is token 'a' (0xffd2d7a8 'a') -0xffd2d6f8->Object::Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d7a8 } -0xffd2d7a8->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d6f8, 0xffd2d7a8 } -Shifting token 'a' (0xffd2d6f8 'a') -0x57d103f4->Object::Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d6f8 } -0xffd2d6f8->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0x57d103f4, 0xffd2d6f8 } +0xffae62df->Object::Object { 0x580893c4, 0x580893d4, 0x580893e4 } +0xffae6388->Object::Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae62df } +0xffae62df->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae62df, 0xffae6388 } +Next token is token 'a' (0xffae6388 'a') +0xffae62d8->Object::Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae6388 } +0xffae6388->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae62d8, 0xffae6388 } +Shifting token 'a' (0xffae62d8 'a') +0x580893f4->Object::Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae62d8 } +0xffae62d8->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0x580893f4, 0xffae62d8 } Entering state 1 Stack now 0 10 10 10 1 -0xffd2d7b8->Object::Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0x57d103f4 } +0xffae6398->Object::Object { 0x580893c4, 0x580893d4, 0x580893e4, 0x580893f4 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x57d103f4 'a') --> $$ = nterm item (0xffd2d7b8 'a') -0x57d103f4->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0x57d103f4, 0xffd2d7b8 } -0x57d103f4->Object::Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d7b8 } -0xffd2d7b8->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0x57d103f4, 0xffd2d7b8 } + $1 = token 'a' (0x580893f4 'a') +-> $$ = nterm item (0xffae6398 'a') +0x580893f4->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0x580893f4, 0xffae6398 } +0x580893f4->Object::Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae6398 } +0xffae6398->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0x580893f4, 0xffae6398 } Entering state 10 Stack now 0 10 10 10 10 Reading a token 
-0xffd2d6ff->Object::Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0x57d103f4 } -0xffd2d7a8->Object::Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0x57d103f4, 0xffd2d6ff } -0xffd2d6ff->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0x57d103f4, 0xffd2d6ff, 0xffd2d7a8 } -Next token is token 'p' (0xffd2d7a8 'p'Exception caught: cleaning lookahead and stack -0x57d103f4->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0x57d103f4, 0xffd2d7a8 } -0x57d103e4->Object::~Object { 0x57d103c4, 0x57d103d4, 0x57d103e4, 0xffd2d7a8 } -0x57d103d4->Object::~Object { 0x57d103c4, 0x57d103d4, 0xffd2d7a8 } -0x57d103c4->Object::~Object { 0x57d103c4, 0xffd2d7a8 } -0xffd2d7a8->Object::~Object { 0xffd2d7a8 } +0xffae62df->Object::Object { 0x580893c4, 0x580893d4, 0x580893e4, 0x580893f4 } +0xffae6388->Object::Object { 0x580893c4, 0x580893d4, 0x580893e4, 0x580893f4, 0xffae62df } +0xffae62df->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0x580893f4, 0xffae62df, 0xffae6388 } +Next token is token 'p' (0xffae6388 'p'Exception caught: cleaning lookahead and stack +0x580893f4->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0x580893f4, 0xffae6388 } +0x580893e4->Object::~Object { 0x580893c4, 0x580893d4, 0x580893e4, 0xffae6388 } +0x580893d4->Object::~Object { 0x580893c4, 0x580893d4, 0xffae6388 } +0x580893c4->Object::~Object { 0x580893c4, 0xffae6388 } +0xffae6388->Object::~Object { 0xffae6388 } exception caught: printer end { } +stderr: ./c++.at:1363: grep '^exception caught: printer$' stderr +./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: exception caught: printer ./c++.at:1363: $PREPARSER ./input aaaae +./glr-regression.at:356: $PREPARSER ./glr-regr2a input2.txt stderr: -stdout: stderr: exception caught: syntax error ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:844: $PREPARSER ./glr-regr7 -stderr: -memory exhausted -./glr-regression.at:844: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input aaaaE +./glr-regression.at:356: $PREPARSER ./glr-regr2a input3.txt +stderr: stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -736. glr-regression.at:844: ok +./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input aaaaT -stderr: +719. glr-regression.at:356: stderr: + ok ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input aaaaR - stderr: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -688. c++.at:1363: ok -./glr-regression.at:945: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr8 glr-regr8.cc $LIBS - -740. glr-regression.at:946: testing Incorrectly initialized location for empty right-hand side in GLR: glr2.cc ... -./glr-regression.at:946: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.cc glr-regr8.y -741. glr-regression.at:1036: testing No users destructors if stack 0 deleted: glr.c ... 
-./glr-regression.at:1036: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.c glr-regr9.y -./glr-regression.at:1036: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr9 glr-regr9.c $LIBS -./glr-regression.at:946: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr8 glr-regr8.cc $LIBS -stderr: -stdout: -./c++.at:860: $PREPARSER ./input -stderr: -./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -stderr: -stdout: -./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./glr-regression.at:944: $PREPARSER ./glr-regr8 -stderr: -./glr-regression.at:944: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -738. glr-regression.at:944: ok - -742. glr-regression.at:1037: testing No users destructors if stack 0 deleted: glr.cc ... -./glr-regression.at:1037: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.cc glr-regr9.y -./glr-regression.at:1037: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr9 glr-regr9.cc $LIBS -stderr: -stdout: -./glr-regression.at:672: $PREPARSER ./glr-regr5 -stderr: -Ambiguity detected. -Option 1, - start -> - 'a' - -Option 2, - start -> - 'a' - -syntax is ambiguous -./glr-regression.at:672: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -731. glr-regression.at:672: ok +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS 743. glr-regression.at:1038: testing No users destructors if stack 0 deleted: glr2.cc ... ./glr-regression.at:1038: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.cc glr-regr9.y ./glr-regression.at:1038: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr9 glr-regr9.cc $LIBS -stderr: -stdout: -./glr-regression.at:1036: $PREPARSER ./glr-regr9 -stderr: -memory exhausted -./glr-regression.at:1036: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -741. glr-regression.at:1036: ok - 744. glr-regression.at:1102: testing Corrupted semantic options if user action cuts parse: glr.c ... ./glr-regression.at:1102: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.c glr-regr10.y ./glr-regression.at:1102: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr10 glr-regr10.c $LIBS stderr: stdout: -./c++.at:860: $PREPARSER ./input +./c++.at:1362: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input i stderr: -./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./glr-regression.at:945: $PREPARSER ./glr-regr8 +exception caught: initial-action +./c++.at:1555: $PREPARSER ./test +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./glr-regression.at:945: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -739. 
glr-regression.at:945: ok - +./c++.at:1362: $PREPARSER ./input aaaap stderr: stdout: -./glr-regression.at:740: $PREPARSER ./glr-regr6 +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:490: $PREPARSER ./glr-regr3 input.txt stderr: -Ambiguity detected. -Option 1, - start -> - 'a' +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +691. c++.at:1517: ./c++.at:1362: $PREPARSER ./input --debug aaaap +./glr-regression.at:490: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok +stderr: +722. glr-regression.at:490: Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xff84cebf->Object::Object { } +0xff84cf68->Object::Object { 0xff84cebf } +0xff84cebf->Object::~Object { 0xff84cebf, 0xff84cf68 } +Next token is token 'a' (0xff84cf68 'a') +0xff84ceb8->Object::Object { 0xff84cf68 } +0xff84cf68->Object::~Object { 0xff84ceb8, 0xff84cf68 } +Shifting token 'a' (0xff84ceb8 'a') +0x567d13c4->Object::Object { 0xff84ceb8 } +0xff84ceb8->Object::~Object { 0x567d13c4, 0xff84ceb8 } +Entering state 2 +Stack now 0 2 +0xff84cf78->Object::Object { 0x567d13c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x567d13c4 'a') +-> $$ = nterm item (0xff84cf78 'a') +0x567d13c4->Object::~Object { 0x567d13c4, 0xff84cf78 } +0x567d13c4->Object::Object { 0xff84cf78 } +0xff84cf78->Object::~Object { 0x567d13c4, 0xff84cf78 } +Entering state 11 +Stack now 0 11 +Reading a token +0xff84cebf->Object::Object { 0x567d13c4 } +0xff84cf68->Object::Object { 0x567d13c4, 0xff84cebf } +0xff84cebf->Object::~Object { 0x567d13c4, 0xff84cebf, 0xff84cf68 } +Next token is token 'a' (0xff84cf68 'a') +0xff84ceb8->Object::Object { 0x567d13c4, 0xff84cf68 } +0xff84cf68->Object::~Object { 0x567d13c4, 0xff84ceb8, 0xff84cf68 } +Shifting token 'a' (0xff84ceb8 'a') +0x567d13d4->Object::Object { 0x567d13c4, 0xff84ceb8 } +0xff84ceb8->Object::~Object { 0x567d13c4, 0x567d13d4, 0xff84ceb8 } +Entering state 2 +Stack now 0 11 2 +0xff84cf78->Object::Object { 0x567d13c4, 0x567d13d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x567d13d4 'a') +-> $$ = nterm item (0xff84cf78 'a') +0x567d13d4->Object::~Object { 0x567d13c4, 0x567d13d4, 0xff84cf78 } +0x567d13d4->Object::Object { 0x567d13c4, 0xff84cf78 } +0xff84cf78->Object::~Object { 0x567d13c4, 0x567d13d4, 0xff84cf78 } +Entering state 11 +Stack now 0 11 11 +Reading a token +0xff84cebf->Object::Object { 0x567d13c4, 0x567d13d4 } +0xff84cf68->Object::Object { 0x567d13c4, 0x567d13d4, 0xff84cebf } +0xff84cebf->Object::~Object { 0x567d13c4, 0x567d13d4, 0xff84cebf, 0xff84cf68 } +Next token is token 'a' (0xff84cf68 'a') +0xff84ceb8->Object::Object { 0x567d13c4, 0x567d13d4, 0xff84cf68 } +0xff84cf68->Object::~Object { 0x567d13c4, 0x567d13d4, 0xff84ceb8, 0xff84cf68 } +Shifting token 'a' (0xff84ceb8 'a') +0x567d13e4->Object::Object { 0x567d13c4, 0x567d13d4, 0xff84ceb8 } +0xff84ceb8->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84ceb8 } +Entering state 2 +Stack now 0 11 11 2 +0xff84cf78->Object::Object { 0x567d13c4, 0x567d13d4, 0x567d13e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x567d13e4 'a') +-> $$ = nterm item (0xff84cf78 'a') +0x567d13e4->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84cf78 } +0x567d13e4->Object::Object { 0x567d13c4, 0x567d13d4, 0xff84cf78 } +0xff84cf78->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84cf78 } +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0xff84cebf->Object::Object { 0x567d13c4, 0x567d13d4, 
0x567d13e4 } +0xff84cf68->Object::Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84cebf } +0xff84cebf->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84cebf, 0xff84cf68 } +Next token is token 'a' (0xff84cf68 'a') +0xff84ceb8->Object::Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84cf68 } +0xff84cf68->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84ceb8, 0xff84cf68 } +Shifting token 'a' (0xff84ceb8 'a') +0x567d13f4->Object::Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84ceb8 } +0xff84ceb8->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0x567d13f4, 0xff84ceb8 } +Entering state 2 +Stack now 0 11 11 11 2 +0xff84cf78->Object::Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0x567d13f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x567d13f4 'a') +-> $$ = nterm item (0xff84cf78 'a') +0x567d13f4->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0x567d13f4, 0xff84cf78 } +0x567d13f4->Object::Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84cf78 } +0xff84cf78->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0x567d13f4, 0xff84cf78 } +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0xff84cebf->Object::Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0x567d13f4 } +0xff84cf68->Object::Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0x567d13f4, 0xff84cebf } +0xff84cebf->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0x567d13f4, 0xff84cebf, 0xff84cf68 } +Next token is token 'p' (0xff84cf68 'p'Exception caught: cleaning lookahead and stack +0x567d13f4->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0x567d13f4, 0xff84cf68 } +0x567d13e4->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84cf68 } +0x567d13d4->Object::~Object { 0x567d13c4, 0x567d13d4, 0xff84cf68 } +0x567d13c4->Object::~Object { 0x567d13c4, 0xff84cf68 } +0xff84cf68->Object::~Object { 0xff84cf68 } +exception caught: printer +end { } + ok +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xff84cebf->Object::Object { } +0xff84cf68->Object::Object { 0xff84cebf } +0xff84cebf->Object::~Object { 0xff84cebf, 0xff84cf68 } +Next token is token 'a' (0xff84cf68 'a') +0xff84ceb8->Object::Object { 0xff84cf68 } +0xff84cf68->Object::~Object { 0xff84ceb8, 0xff84cf68 } +Shifting token 'a' (0xff84ceb8 'a') +0x567d13c4->Object::Object { 0xff84ceb8 } +0xff84ceb8->Object::~Object { 0x567d13c4, 0xff84ceb8 } +Entering state 2 +Stack now 0 2 +0xff84cf78->Object::Object { 0x567d13c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x567d13c4 'a') +-> $$ = nterm item (0xff84cf78 'a') +0x567d13c4->Object::~Object { 0x567d13c4, 0xff84cf78 } +0x567d13c4->Object::Object { 0xff84cf78 } +0xff84cf78->Object::~Object { 0x567d13c4, 0xff84cf78 } +Entering state 11 +Stack now 0 11 +Reading a token +0xff84cebf->Object::Object { 0x567d13c4 } +0xff84cf68->Object::Object { 0x567d13c4, 0xff84cebf } +0xff84cebf->Object::~Object { 0x567d13c4, 0xff84cebf, 0xff84cf68 } +Next token is token 'a' (0xff84cf68 'a') +0xff84ceb8->Object::Object { 0x567d13c4, 0xff84cf68 } +0xff84cf68->Object::~Object { 0x567d13c4, 0xff84ceb8, 0xff84cf68 } +Shifting token 'a' (0xff84ceb8 'a') +0x567d13d4->Object::Object { 0x567d13c4, 0xff84ceb8 } +0xff84ceb8->Object::~Object { 0x567d13c4, 0x567d13d4, 0xff84ceb8 } +Entering state 2 +Stack now 0 11 2 +0xff84cf78->Object::Object { 0x567d13c4, 0x567d13d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x567d13d4 'a') +-> $$ = nterm item (0xff84cf78 'a') 
+0x567d13d4->Object::~Object { 0x567d13c4, 0x567d13d4, 0xff84cf78 } +0x567d13d4->Object::Object { 0x567d13c4, 0xff84cf78 } +0xff84cf78->Object::~Object { 0x567d13c4, 0x567d13d4, 0xff84cf78 } +Entering state 11 +Stack now 0 11 11 +Reading a token +0xff84cebf->Object::Object { 0x567d13c4, 0x567d13d4 } +0xff84cf68->Object::Object { 0x567d13c4, 0x567d13d4, 0xff84cebf } +0xff84cebf->Object::~Object { 0x567d13c4, 0x567d13d4, 0xff84cebf, 0xff84cf68 } +Next token is token 'a' (0xff84cf68 'a') +0xff84ceb8->Object::Object { 0x567d13c4, 0x567d13d4, 0xff84cf68 } +0xff84cf68->Object::~Object { 0x567d13c4, 0x567d13d4, 0xff84ceb8, 0xff84cf68 } +Shifting token 'a' (0xff84ceb8 'a') +0x567d13e4->Object::Object { 0x567d13c4, 0x567d13d4, 0xff84ceb8 } +0xff84ceb8->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84ceb8 } +Entering state 2 +Stack now 0 11 11 2 +0xff84cf78->Object::Object { 0x567d13c4, 0x567d13d4, 0x567d13e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x567d13e4 'a') +-> $$ = nterm item (0xff84cf78 'a') +0x567d13e4->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84cf78 } +0x567d13e4->Object::Object { 0x567d13c4, 0x567d13d4, 0xff84cf78 } +0xff84cf78->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84cf78 } +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0xff84cebf->Object::Object { 0x567d13c4, 0x567d13d4, 0x567d13e4 } +0xff84cf68->Object::Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84cebf } +0xff84cebf->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84cebf, 0xff84cf68 } +Next token is token 'a' (0xff84cf68 'a') +0xff84ceb8->Object::Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84cf68 } +0xff84cf68->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84ceb8, 0xff84cf68 } +Shifting token 'a' (0xff84ceb8 'a') +0x567d13f4->Object::Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84ceb8 } +0xff84ceb8->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0x567d13f4, 0xff84ceb8 } +Entering state 2 +Stack now 0 11 11 11 2 +0xff84cf78->Object::Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0x567d13f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x567d13f4 'a') +-> $$ = nterm item (0xff84cf78 'a') +0x567d13f4->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0x567d13f4, 0xff84cf78 } +0x567d13f4->Object::Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84cf78 } +0xff84cf78->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0x567d13f4, 0xff84cf78 } +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0xff84cebf->Object::Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0x567d13f4 } +0xff84cf68->Object::Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0x567d13f4, 0xff84cebf } +0xff84cebf->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0x567d13f4, 0xff84cebf, 0xff84cf68 } +Next token is token 'p' (0xff84cf68 'p'Exception caught: cleaning lookahead and stack +0x567d13f4->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0x567d13f4, 0xff84cf68 } +0x567d13e4->Object::~Object { 0x567d13c4, 0x567d13d4, 0x567d13e4, 0xff84cf68 } +0x567d13d4->Object::~Object { 0x567d13c4, 0x567d13d4, 0xff84cf68 } +0x567d13c4->Object::~Object { 0x567d13c4, 0xff84cf68 } +0xff84cf68->Object::~Object { 0xff84cf68 } +exception caught: printer +end { } +./c++.at:1362: grep '^exception caught: printer$' stderr -Option 2, - start -> - 'a' +stdout: +exception caught: printer +./c++.at:1362: $PREPARSER ./input aaaae -syntax is ambiguous -./glr-regression.at:740: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 
+stderr: +exception caught: syntax error +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaT +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaR +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -734. glr-regression.at:740: ok -./glr-regression.at:1102: $PREPARSER ./glr-regr10 +./glr-regression.at:844: $PREPARSER ./glr-regr7 stderr: -./glr-regression.at:1102: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -744. glr-regression.at:1102: ok +memory exhausted +./glr-regression.at:844: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +736. glr-regression.at:844: ok + 745. glr-regression.at:1103: testing Corrupted semantic options if user action cuts parse: glr.cc ... ./glr-regression.at:1103: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.cc glr-regr10.y - - -./glr-regression.at:1103: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr10 glr-regr10.cc $LIBS 746. glr-regression.at:1104: testing Corrupted semantic options if user action cuts parse: glr2.cc ... ./glr-regression.at:1104: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.cc glr-regr10.y +./glr-regression.at:1103: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr10 glr-regr10.cc $LIBS +./glr-regression.at:1104: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr10 glr-regr10.cc $LIBS 747. glr-regression.at:1174: testing Undesirable destructors if user action cuts parse: glr.c ... ./glr-regression.at:1174: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.c glr-regr11.y -./glr-regression.at:1104: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr10 glr-regr10.cc $LIBS +./glr-regression.at:1174: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr11 glr-regr11.c $LIBS stderr: stdout: -./glr-regression.at:1037: $PREPARSER ./glr-regr9 +./glr-regression.at:944: $PREPARSER ./glr-regr8 stderr: -memory exhausted -./glr-regression.at:1037: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1174: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr11 glr-regr11.c $LIBS -742. glr-regression.at:1037: ok +./glr-regression.at:944: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +738. glr-regression.at:944: ok -748. glr-regression.at:1175: testing Undesirable destructors if user action cuts parse: glr.cc ... -./glr-regression.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.cc glr-regr11.y stderr: stdout: -./glr-regression.at:845: $PREPARSER ./glr-regr7 -./glr-regression.at:1175: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr11 glr-regr11.cc $LIBS +./glr-regression.at:1036: $PREPARSER ./glr-regr9 stderr: memory exhausted -./glr-regression.at:845: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -737. 
glr-regression.at:845: ok +./glr-regression.at:1036: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +741. glr-regression.at:1036: ok +748. glr-regression.at:1175: testing Undesirable destructors if user action cuts parse: glr.cc ... +./glr-regression.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.cc glr-regr11.y 749. glr-regression.at:1176: testing Undesirable destructors if user action cuts parse: glr2.cc ... ./glr-regression.at:1176: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.cc glr-regr11.y -stderr: -stdout: -./c++.at:860: $PREPARSER ./input -stderr: -./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:1175: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr11 glr-regr11.cc $LIBS ./glr-regression.at:1176: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr11 glr-regr11.cc $LIBS stderr: stdout: -./glr-regression.at:946: $PREPARSER ./glr-regr8 +./glr-regression.at:594: $PREPARSER ./glr-regr4 stderr: -./glr-regression.at:946: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -740. glr-regression.at:946: ok +./glr-regression.at:594: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +725. glr-regression.at:594: ok +750. glr-regression.at:1310: testing Leaked semantic values if user action cuts parse: glr.c ... +./glr-regression.at:1310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.c glr-regr12.y stderr: stdout: -./glr-regression.at:1174: $PREPARSER ./glr-regr11 +./glr-regression.at:1102: $PREPARSER ./glr-regr10 stderr: -./glr-regression.at:1174: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -747. glr-regression.at:1174: ok +./glr-regression.at:1102: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +744. glr-regression.at:1102: ok -750. glr-regression.at:1310: testing Leaked semantic values if user action cuts parse: glr.c ... -./glr-regression.at:1310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.c glr-regr12.y ./glr-regression.at:1310: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr12 glr-regr12.c $LIBS -751. glr-regression.at:1311: testing Leaked semantic values if user action cuts parse: glr.cc ... -./glr-regression.at:1311: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.cc glr-regr12.y -./glr-regression.at:1311: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr12 glr-regr12.cc $LIBS stderr: stdout: -./glr-regression.at:1103: $PREPARSER ./glr-regr10 +./c++.at:851: $PREPARSER ./input stderr: -./glr-regression.at:1103: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -745. glr-regression.at:1103: ok - -752. glr-regression.at:1312: testing Leaked semantic values if user action cuts parse: glr2.cc ... -./glr-regression.at:1312: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.cc glr-regr12.y +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +751. 
glr-regression.at:1311: testing Leaked semantic values if user action cuts parse: glr.cc ... stdout: -./c++.at:860: $PREPARSER ./input -stderr: -./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./glr-regression.at:1312: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr12 glr-regr12.cc $LIBS stderr: +./glr-regression.at:1311: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.cc glr-regr12.y stdout: -./glr-regression.at:1175: $PREPARSER ./glr-regr11 +./glr-regression.at:598: $PREPARSER ./glr-regr4 +./glr-regression.at:945: $PREPARSER ./glr-regr8 stderr: -./glr-regression.at:1175: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -748. glr-regression.at:1175: ok stderr: -stdout: -./glr-regression.at:1310: $PREPARSER ./glr-regr12 +./glr-regression.at:598: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +728. glr-regression.at:598: ok +./glr-regression.at:945: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +739. glr-regression.at:945: ok -stderr: -./glr-regression.at:1310: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -750. glr-regression.at:1310: ok +678. c++.at:848: ok -753. glr-regression.at:1445: testing Incorrect lookahead during deterministic GLR: glr.c ... -./glr-regression.at:1445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.c glr-regr13.y -./glr-regression.at:1445: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr13 glr-regr13.c $LIBS -754. glr-regression.at:1446: testing Incorrect lookahead during deterministic GLR: glr.cc ... -./glr-regression.at:1446: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.cc glr-regr13.y -./glr-regression.at:1446: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr13 glr-regr13.cc $LIBS + +./glr-regression.at:1311: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr12 glr-regr12.cc $LIBS +752. glr-regression.at:1312: testing Leaked semantic values if user action cuts parse: glr2.cc ... +./glr-regression.at:1312: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.cc glr-regr12.y stderr: +753. glr-regression.at:1445: testing Incorrect lookahead during deterministic GLR: glr.c ... stdout: -./glr-regression.at:1038: $PREPARSER ./glr-regr9 +./glr-regression.at:1445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.c glr-regr13.y +./glr-regression.at:1037: $PREPARSER ./glr-regr9 stderr: memory exhausted -./glr-regression.at:1038: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -743. glr-regression.at:1038: ok +./glr-regression.at:1037: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +742. glr-regression.at:1037: ok +754. glr-regression.at:1446: testing Incorrect lookahead during deterministic GLR: glr.cc ... 
+./glr-regression.at:1446: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.cc glr-regr13.y +./glr-regression.at:1445: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr13 glr-regr13.c $LIBS +./glr-regression.at:1312: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr12 glr-regr12.cc $LIBS 755. glr-regression.at:1447: testing Incorrect lookahead during deterministic GLR: glr2.cc ... ./glr-regression.at:1447: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.cc glr-regr13.y -./glr-regression.at:1447: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr13 glr-regr13.cc $LIBS +./glr-regression.at:1446: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr13 glr-regr13.cc $LIBS stderr: stdout: -./glr-regression.at:1311: $PREPARSER ./glr-regr12 +./glr-regression.at:1174: $PREPARSER ./glr-regr11 stderr: -./glr-regression.at:1311: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -751. glr-regression.at:1311: ok +./glr-regression.at:1174: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +747. glr-regression.at:1174: ok +./glr-regression.at:1447: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr13 glr-regr13.cc $LIBS stderr: stdout: -./c++.at:860: $PREPARSER ./input -stderr: -./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./glr-regression.at:1104: $PREPARSER ./glr-regr10 -stderr: -./glr-regression.at:1104: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:857: $PREPARSER ./input stderr: +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:858: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy 756. glr-regression.at:1678: testing Incorrect lookahead during nondeterministic GLR: glr.c ... ./glr-regression.at:1678: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.c glr-regr14.y -746. glr-regression.at:1104: ok +======== Testing with C++ standard flags: '' +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: stdout: -./glr-regression.at:1445: $PREPARSER ./glr-regr13 +./glr-regression.at:672: $PREPARSER ./glr-regr5 stderr: +Ambiguity detected. +Option 1, + start -> + 'a' -./glr-regression.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -753. glr-regression.at:1445: ok +Option 2, + start -> + 'a' +syntax is ambiguous +./glr-regression.at:672: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +731. glr-regression.at:672: ok ./glr-regression.at:1678: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr14 glr-regr14.c $LIBS + 757. glr-regression.at:1679: testing Incorrect lookahead during nondeterministic GLR: glr.cc ... ./glr-regression.at:1679: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.cc glr-regr14.y -758. glr-regression.at:1680: testing Incorrect lookahead during nondeterministic GLR: glr2.cc ... 
-./glr-regression.at:1680: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.cc glr-regr14.y ./glr-regression.at:1679: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr14 glr-regr14.cc $LIBS -./glr-regression.at:1680: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr14 glr-regr14.cc $LIBS stderr: stdout: -./glr-regression.at:1446: $PREPARSER ./glr-regr13 +./glr-regression.at:1310: $PREPARSER ./glr-regr12 stderr: -./glr-regression.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -754. glr-regression.at:1446: ok +./glr-regression.at:1310: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +750. glr-regression.at:1310: ok -759. glr-regression.at:1785: testing Leaked semantic values when reporting ambiguity: glr.c ... -./glr-regression.at:1785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.c glr-regr15.y -./glr-regression.at:1785: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr15 glr-regr15.c $LIBS stderr: stdout: -./glr-regression.at:1176: $PREPARSER ./glr-regr11 +./glr-regression.at:1103: $PREPARSER ./glr-regr10 stderr: -./glr-regression.at:1176: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -749. glr-regression.at:1176: ok +./glr-regression.at:1103: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +745. glr-regression.at:1103: ok stderr: stdout: -./glr-regression.at:1678: $PREPARSER ./glr-regr14 +./glr-regression.at:740: $PREPARSER ./glr-regr6 stderr: -./glr-regression.at:1678: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -756. glr-regression.at:1678: ok +Ambiguity detected. +Option 1, + start -> + 'a' + +Option 2, + start -> + 'a' + +syntax is ambiguous +./glr-regression.at:740: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +734. glr-regression.at:740: ok +758. glr-regression.at:1680: testing Incorrect lookahead during nondeterministic GLR: glr2.cc ... +./glr-regression.at:1680: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.cc glr-regr14.y +759. glr-regression.at:1785: testing Leaked semantic values when reporting ambiguity: glr.c ... +./glr-regression.at:1785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.c glr-regr15.y 760. glr-regression.at:1786: testing Leaked semantic values when reporting ambiguity: glr.cc ... ./glr-regression.at:1786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.cc glr-regr15.y -761. glr-regression.at:1787: testing Leaked semantic values when reporting ambiguity: glr2.cc ... 
-./glr-regression.at:1787: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.cc glr-regr15.y +./glr-regression.at:1680: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr14 glr-regr14.cc $LIBS +./glr-regression.at:1785: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr15 glr-regr15.c $LIBS +./glr-regression.at:1786: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr15 glr-regr15.cc $LIBS stderr: stdout: -./c++.at:860: $PREPARSER ./input -./glr-regression.at:1786: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr15 glr-regr15.cc $LIBS +./glr-regression.at:1445: $PREPARSER ./glr-regr13 stderr: -./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./glr-regression.at:1787: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr15 glr-regr15.cc $LIBS +./glr-regression.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +753. glr-regression.at:1445: ok + stderr: stdout: -./glr-regression.at:1785: $PREPARSER ./glr-regr15 +./glr-regression.at:1175: $PREPARSER ./glr-regr11 stderr: -Ambiguity detected. -Option 1, - ambiguity -> - ambiguity1 -> - -Option 2, - ambiguity -> - ambiguity2 -> - -syntax is ambiguous -./glr-regression.at:1785: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -759. glr-regression.at:1785: ok +./glr-regression.at:1175: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +748. glr-regression.at:1175: ok +stderr: +stdout: +./c++.at:1361: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +761. glr-regression.at:1787: testing Leaked semantic values when reporting ambiguity: glr2.cc ... 
+./c++.at:1361: $PREPARSER ./input i +./glr-regression.at:1787: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.cc glr-regr15.y +stderr: +exception caught: initial-action +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaap +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input --debug aaaap +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x572e5a00->Object::Object { } +Next token is token 'a' (0x572e5a00 'a') +Shifting token 'a' (0x572e5a00 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x572e5a00 'a') +-> $$ = nterm item (0x572e5a00 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x572e5a30->Object::Object { 0x572e5a00 } +Next token is token 'a' (0x572e5a30 'a') +Shifting token 'a' (0x572e5a30 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x572e5a30 'a') +-> $$ = nterm item (0x572e5a30 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x572e5a60->Object::Object { 0x572e5a00, 0x572e5a30 } +Next token is token 'a' (0x572e5a60 'a') +Shifting token 'a' (0x572e5a60 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x572e5a60 'a') +-> $$ = nterm item (0x572e5a60 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x572e5a90->Object::Object { 0x572e5a00, 0x572e5a30, 0x572e5a60 } +Next token is token 'a' (0x572e5a90 'a') +Shifting token 'a' (0x572e5a90 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x572e5a90 'a') +-> $$ = nterm item (0x572e5a90 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x572e5ac0->Object::Object { 0x572e5a00, 0x572e5a30, 0x572e5a60, 0x572e5a90 } +Next token is token 'p' (0x572e5ac0 'p'Exception caught: cleaning lookahead and stack +0x572e5ac0->Object::~Object { 0x572e5a00, 0x572e5a30, 0x572e5a60, 0x572e5a90, 0x572e5ac0 } +0x572e5a90->Object::~Object { 0x572e5a00, 0x572e5a30, 0x572e5a60, 0x572e5a90 } +0x572e5a60->Object::~Object { 0x572e5a00, 0x572e5a30, 0x572e5a60 } +0x572e5a30->Object::~Object { 0x572e5a00, 0x572e5a30 } +0x572e5a00->Object::~Object { 0x572e5a00 } +exception caught: printer +end { } +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x572e5a00->Object::Object { } +Next token is token 'a' (0x572e5a00 'a') +Shifting token 'a' (0x572e5a00 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x572e5a00 'a') +-> $$ = nterm item (0x572e5a00 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x572e5a30->Object::Object { 0x572e5a00 } +Next token is token 'a' (0x572e5a30 'a') +Shifting token 'a' (0x572e5a30 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x572e5a30 'a') +-> $$ = nterm item (0x572e5a30 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x572e5a60->Object::Object { 0x572e5a00, 0x572e5a30 } +Next token is token 'a' (0x572e5a60 'a') +Shifting token 'a' (0x572e5a60 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x572e5a60 'a') +-> $$ = nterm item (0x572e5a60 'a') +Entering state 10 +Stack now 0 10 10 
10 +Reading a token +0x572e5a90->Object::Object { 0x572e5a00, 0x572e5a30, 0x572e5a60 } +Next token is token 'a' (0x572e5a90 'a') +Shifting token 'a' (0x572e5a90 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x572e5a90 'a') +-> $$ = nterm item (0x572e5a90 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x572e5ac0->Object::Object { 0x572e5a00, 0x572e5a30, 0x572e5a60, 0x572e5a90 } +Next token is token 'p' (0x572e5ac0 'p'Exception caught: cleaning lookahead and stack +0x572e5ac0->Object::~Object { 0x572e5a00, 0x572e5a30, 0x572e5a60, 0x572e5a90, 0x572e5ac0 } +0x572e5a90->Object::~Object { 0x572e5a00, 0x572e5a30, 0x572e5a60, 0x572e5a90 } +0x572e5a60->Object::~Object { 0x572e5a00, 0x572e5a30, 0x572e5a60 } +0x572e5a30->Object::~Object { 0x572e5a00, 0x572e5a30 } +0x572e5a00->Object::~Object { 0x572e5a00 } +exception caught: printer +end { } +./c++.at:1361: grep '^exception caught: printer$' stderr +stdout: 762. glr-regression.at:1860: testing Leaked lookahead after nondeterministic parse syntax error: glr.c ... ./glr-regression.at:1860: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.c glr-regr16.y +exception caught: printer +./c++.at:1361: $PREPARSER ./input aaaae +stderr: +exception caught: syntax error +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:1787: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr15 glr-regr15.cc $LIBS +./c++.at:1361: $PREPARSER ./input aaaaE ./glr-regression.at:1860: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr16 glr-regr16.c $LIBS stderr: -stdout: -./glr-regression.at:1312: $PREPARSER ./glr-regr12 +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaT stderr: -./glr-regression.at:1312: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -752. glr-regression.at:1312: ok +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaR +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +686. c++.at:1361: ok stderr: stdout: -./glr-regression.at:1679: $PREPARSER ./glr-regr14 +./glr-regression.at:1678: $PREPARSER ./glr-regr14 +stderr: +./glr-regression.at:1678: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 763. glr-regression.at:1861: testing Leaked lookahead after nondeterministic parse syntax error: glr.cc ... ./glr-regression.at:1861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.cc glr-regr16.y -stderr: -./glr-regression.at:1679: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -757. glr-regression.at:1679: ok +756. glr-regression.at:1678: ok -./glr-regression.at:1861: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr16 glr-regr16.cc $LIBS -764. glr-regression.at:1862: testing Leaked lookahead after nondeterministic parse syntax error: glr2.cc ... 
-./glr-regression.at:1862: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.cc glr-regr16.y -./glr-regression.at:1862: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr16 glr-regr16.cc $LIBS stderr: stdout: -./c++.at:860: $PREPARSER ./input +./glr-regression.at:845: $PREPARSER ./glr-regr7 +./glr-regression.at:1861: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr16 glr-regr16.cc $LIBS stderr: -./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -679. c++.at:854: ok +memory exhausted +./glr-regression.at:845: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +737. glr-regression.at:845: ok stderr: stdout: -./glr-regression.at:1860: $PREPARSER ./glr-regr16 +./c++.at:858: $PREPARSER ./input +stderr: +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +764. glr-regression.at:1862: testing Leaked lookahead after nondeterministic parse syntax error: glr2.cc ... +./glr-regression.at:1862: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.cc glr-regr16.y +======== Testing with C++ standard flags: '' +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./glr-regression.at:1311: $PREPARSER ./glr-regr12 stderr: -syntax error -./glr-regression.at:1860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -762. glr-regression.at:1860: ok - 765. glr-regression.at:1964: testing Uninitialized location when reporting ambiguity: glr.c api.pure ... ./glr-regression.at:1964: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.c glr-regr17.y +./glr-regression.at:1311: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: ./glr-regression.at:1964: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr17 glr-regr17.c $LIBS +./glr-regression.at:1862: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr16 glr-regr16.cc $LIBS +./c++.at:1066: $PREPARSER ./input < in +751. glr-regression.at:1311: ok + +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:1363: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 766. glr-regression.at:1965: testing Uninitialized location when reporting ambiguity: glr.cc ... ./glr-regression.at:1965: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.cc glr-regr17.y -./glr-regression.at:1965: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr17 glr-regr17.cc $LIBS +./c++.at:1363: $PREPARSER ./input aaaal stderr: stdout: -./glr-regression.at:1786: $PREPARSER ./glr-regr15 +./glr-regression.at:1785: $PREPARSER ./glr-regr15 +stderr: stderr: +exception caught: yylex Ambiguity detected. Option 1, ambiguity -> @@ -271115,78 +269972,589 @@ ambiguity2 -> syntax is ambiguous -./glr-regression.at:1786: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -760. glr-regression.at:1786: ok - -767. glr-regression.at:1966: testing Uninitialized location when reporting ambiguity: glr2.cc ... 
-./glr-regression.at:1966: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.cc glr-regr17.y -./glr-regression.at:1966: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr17 glr-regr17.cc $LIBS +./glr-regression.at:1785: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $PREPARSER ./input < in stderr: -stdout: -./glr-regression.at:1964: $PREPARSER ./glr-regr17 +./c++.at:1363: $PREPARSER ./input i +error: invalid expression +759. glr-regression.at:1785: ./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ok stderr: -Ambiguity detected. -Option 1, - start -> - ambig1 -> - sub_ambig2 -> - empty2 -> - 'a' - 'b' - empty1 -> - -Option 2, - start -> - ambig2 -> - sub_ambig2 -> - empty2 -> - 'a' - 'b' - empty2 -> +./c++.at:1066: $PREPARSER ./input < in +stderr: +exception caught: initial-action +error: invalid character -1.1-2.2: syntax is ambiguous -./glr-regression.at:1964: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -765. glr-regression.at:1964: ok +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaap +684. c++.at:1066: ok stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input --debug aaaap +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xffb5b9df->Object::Object { } +0xffb5ba88->Object::Object { 0xffb5b9df } +0xffb5b9df->Object::~Object { 0xffb5b9df, 0xffb5ba88 } +Next token is token 'a' (0xffb5ba88 'a') +0xffb5b9d8->Object::Object { 0xffb5ba88 } +0xffb5ba88->Object::~Object { 0xffb5b9d8, 0xffb5ba88 } +Shifting token 'a' (0xffb5b9d8 'a') +0x56afd3c4->Object::Object { 0xffb5b9d8 } +0xffb5b9d8->Object::~Object { 0x56afd3c4, 0xffb5b9d8 } +Entering state 1 +Stack now 0 1 +0xffb5ba98->Object::Object { 0x56afd3c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56afd3c4 'a') +-> $$ = nterm item (0xffb5ba98 'a') +0x56afd3c4->Object::~Object { 0x56afd3c4, 0xffb5ba98 } +0x56afd3c4->Object::Object { 0xffb5ba98 } +0xffb5ba98->Object::~Object { 0x56afd3c4, 0xffb5ba98 } +Entering state 10 +Stack now 0 10 +Reading a token +0xffb5b9df->Object::Object { 0x56afd3c4 } +0xffb5ba88->Object::Object { 0x56afd3c4, 0xffb5b9df } +0xffb5b9df->Object::~Object { 0x56afd3c4, 0xffb5b9df, 0xffb5ba88 } +Next token is token 'a' (0xffb5ba88 'a') +0xffb5b9d8->Object::Object { 0x56afd3c4, 0xffb5ba88 } +0xffb5ba88->Object::~Object { 0x56afd3c4, 0xffb5b9d8, 0xffb5ba88 } +Shifting token 'a' (0xffb5b9d8 'a') +0x56afd3d4->Object::Object { 0x56afd3c4, 0xffb5b9d8 } +0xffb5b9d8->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0xffb5b9d8 } +Entering state 1 +Stack now 0 10 1 +0xffb5ba98->Object::Object { 0x56afd3c4, 0x56afd3d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56afd3d4 'a') +-> $$ = nterm item (0xffb5ba98 'a') +0x56afd3d4->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0xffb5ba98 } +0x56afd3d4->Object::Object { 0x56afd3c4, 0xffb5ba98 } +0xffb5ba98->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0xffb5ba98 } +Entering state 10 +Stack now 0 10 10 +Reading a token +0xffb5b9df->Object::Object { 0x56afd3c4, 0x56afd3d4 } +0xffb5ba88->Object::Object { 0x56afd3c4, 0x56afd3d4, 0xffb5b9df } +0xffb5b9df->Object::~Object { 0x56afd3c4, 
0x56afd3d4, 0xffb5b9df, 0xffb5ba88 } +Next token is token 'a' (0xffb5ba88 'a') +0xffb5b9d8->Object::Object { 0x56afd3c4, 0x56afd3d4, 0xffb5ba88 } +0xffb5ba88->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0xffb5b9d8, 0xffb5ba88 } +Shifting token 'a' (0xffb5b9d8 'a') +0x56afd3e4->Object::Object { 0x56afd3c4, 0x56afd3d4, 0xffb5b9d8 } +0xffb5b9d8->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5b9d8 } +Entering state 1 +Stack now 0 10 10 1 +0xffb5ba98->Object::Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56afd3e4 'a') +-> $$ = nterm item (0xffb5ba98 'a') +0x56afd3e4->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5ba98 } +0x56afd3e4->Object::Object { 0x56afd3c4, 0x56afd3d4, 0xffb5ba98 } +0xffb5ba98->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5ba98 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0xffb5b9df->Object::Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4 } +0xffb5ba88->Object::Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5b9df } +0xffb5b9df->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5b9df, 0xffb5ba88 } +Next token is token 'a' (0xffb5ba88 'a') +0xffb5b9d8->Object::Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5ba88 } +0xffb5ba88->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5b9d8, 0xffb5ba88 } +Shifting token 'a' (0xffb5b9d8 'a') +0x56afd3f4->Object::Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5b9d8 } +0xffb5b9d8->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0x56afd3f4, 0xffb5b9d8 } +Entering state 1 +Stack now 0 10 10 10 1 +0xffb5ba98->Object::Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0x56afd3f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56afd3f4 'a') +-> $$ = nterm item (0xffb5ba98 'a') +0x56afd3f4->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0x56afd3f4, 0xffb5ba98 } +0x56afd3f4->Object::Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5ba98 } +0xffb5ba98->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0x56afd3f4, 0xffb5ba98 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0xffb5b9df->Object::Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0x56afd3f4 } +0xffb5ba88->Object::Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0x56afd3f4, 0xffb5b9df } +0xffb5b9df->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0x56afd3f4, 0xffb5b9df, 0xffb5ba88 } +Next token is token 'p' (0xffb5ba88 'p'Exception caught: cleaning lookahead and stack +0x56afd3f4->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0x56afd3f4, 0xffb5ba88 } +0x56afd3e4->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5ba88 } +0x56afd3d4->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0xffb5ba88 } +0x56afd3c4->Object::~Object { 0x56afd3c4, 0xffb5ba88 } +0xffb5ba88->Object::~Object { 0xffb5ba88 } +exception caught: printer +end { } +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xffb5b9df->Object::Object { } +0xffb5ba88->Object::Object { 0xffb5b9df } +0xffb5b9df->Object::~Object { 0xffb5b9df, 0xffb5ba88 } +Next token is token 'a' (0xffb5ba88 'a') +0xffb5b9d8->Object::Object { 0xffb5ba88 } +0xffb5ba88->Object::~Object { 0xffb5b9d8, 0xffb5ba88 } +Shifting token 'a' (0xffb5b9d8 'a') +0x56afd3c4->Object::Object { 0xffb5b9d8 } +0xffb5b9d8->Object::~Object { 0x56afd3c4, 0xffb5b9d8 } +Entering state 1 +Stack now 0 1 +0xffb5ba98->Object::Object { 0x56afd3c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' 
(0x56afd3c4 'a') +-> $$ = nterm item (0xffb5ba98 'a') +0x56afd3c4->Object::~Object { 0x56afd3c4, 0xffb5ba98 } +0x56afd3c4->Object::Object { 0xffb5ba98 } +0xffb5ba98->Object::~Object { 0x56afd3c4, 0xffb5ba98 } +Entering state 10 +Stack now 0 10 +Reading a token +0xffb5b9df->Object::Object { 0x56afd3c4 } +0xffb5ba88->Object::Object { 0x56afd3c4, 0xffb5b9df } +0xffb5b9df->Object::~Object { 0x56afd3c4, 0xffb5b9df, 0xffb5ba88 } +Next token is token 'a' (0xffb5ba88 'a') +0xffb5b9d8->Object::Object { 0x56afd3c4, 0xffb5ba88 } +0xffb5ba88->Object::~Object { 0x56afd3c4, 0xffb5b9d8, 0xffb5ba88 } +Shifting token 'a' (0xffb5b9d8 'a') +0x56afd3d4->Object::Object { 0x56afd3c4, 0xffb5b9d8 } +0xffb5b9d8->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0xffb5b9d8 } +Entering state 1 +Stack now 0 10 1 +0xffb5ba98->Object::Object { 0x56afd3c4, 0x56afd3d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56afd3d4 'a') +-> $$ = nterm item (0xffb5ba98 'a') +0x56afd3d4->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0xffb5ba98 } +0x56afd3d4->Object::Object { 0x56afd3c4, 0xffb5ba98 } +0xffb5ba98->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0xffb5ba98 } +Entering state 10 +Stack now 0 10 10 +Reading a token +0xffb5b9df->Object::Object { 0x56afd3c4, 0x56afd3d4 } +0xffb5ba88->Object::Object { 0x56afd3c4, 0x56afd3d4, 0xffb5b9df } +0xffb5b9df->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0xffb5b9df, 0xffb5ba88 } +Next token is token 'a' (0xffb5ba88 'a') +0xffb5b9d8->Object::Object { 0x56afd3c4, 0x56afd3d4, 0xffb5ba88 } +0xffb5ba88->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0xffb5b9d8, 0xffb5ba88 } +Shifting token 'a' (0xffb5b9d8 'a') +0x56afd3e4->Object::Object { 0x56afd3c4, 0x56afd3d4, 0xffb5b9d8 } +0xffb5b9d8->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5b9d8 } +Entering state 1 +Stack now 0 10 10 1 +0xffb5ba98->Object::Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56afd3e4 'a') +-> $$ = nterm item (0xffb5ba98 'a') +0x56afd3e4->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5ba98 } +0x56afd3e4->Object::Object { 0x56afd3c4, 0x56afd3d4, 0xffb5ba98 } +0xffb5ba98->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5ba98 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0xffb5b9df->Object::Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4 } +0xffb5ba88->Object::Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5b9df } +0xffb5b9df->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5b9df, 0xffb5ba88 } +Next token is token 'a' (0xffb5ba88 'a') +0xffb5b9d8->Object::Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5ba88 } +0xffb5ba88->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5b9d8, 0xffb5ba88 } +Shifting token 'a' (0xffb5b9d8 'a') +0x56afd3f4->Object::Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5b9d8 } +0xffb5b9d8->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0x56afd3f4, 0xffb5b9d8 } +Entering state 1 +Stack now 0 10 10 10 1 +0xffb5ba98->Object::Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0x56afd3f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x56afd3f4 'a') +-> $$ = nterm item (0xffb5ba98 'a') +0x56afd3f4->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0x56afd3f4, 0xffb5ba98 } +0x56afd3f4->Object::Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5ba98 } +0xffb5ba98->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0x56afd3f4, 0xffb5ba98 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0xffb5b9df->Object::Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 
0x56afd3f4 } +0xffb5ba88->Object::Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0x56afd3f4, 0xffb5b9df } +0xffb5b9df->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0x56afd3f4, 0xffb5b9df, 0xffb5ba88 } +Next token is token 'p' (0xffb5ba88 'p'Exception caught: cleaning lookahead and stack +0x56afd3f4->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0x56afd3f4, 0xffb5ba88 } +0x56afd3e4->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0x56afd3e4, 0xffb5ba88 } +0x56afd3d4->Object::~Object { 0x56afd3c4, 0x56afd3d4, 0xffb5ba88 } +0x56afd3c4->Object::~Object { 0x56afd3c4, 0xffb5ba88 } +0xffb5ba88->Object::~Object { 0xffb5ba88 } +exception caught: printer +end { } +./c++.at:1363: grep '^exception caught: printer$' stderr +./glr-regression.at:1965: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr17 glr-regr17.cc $LIBS stdout: -./glr-regression.at:1447: $PREPARSER ./glr-regr13 +exception caught: printer +./c++.at:1363: $PREPARSER ./input aaaae stderr: -./glr-regression.at:1447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -755. glr-regression.at:1447: ok - +exception caught: syntax error +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaT +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaR +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +767. glr-regression.at:1966: testing Uninitialized location when reporting ambiguity: glr2.cc ... +./glr-regression.at:1966: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.cc glr-regr17.y 768. glr-regression.at:2035: testing Missed %merge type warnings when LHS type is declared later: glr.c ... ./glr-regression.at:2035: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y +./glr-regression.at:1966: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr17 glr-regr17.cc $LIBS 768. glr-regression.at:2035: ok +stderr: +stdout: +./c++.at:1362: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input i +stderr: 769. glr-regression.at:2036: testing Missed %merge type warnings when LHS type is declared later: glr.cc ... 
+exception caught: initial-action +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./glr-regression.at:2036: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y +stderr: +./c++.at:1362: $PREPARSER ./input aaaap +stdout: +./glr-regression.at:1860: $PREPARSER ./glr-regr16 +stderr: +stderr: +syntax error +./glr-regression.at:1860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 769. glr-regression.at:2036: ok +./c++.at:1362: $PREPARSER ./input --debug aaaap +stderr: +stdout: +./glr-regression.at:946: $PREPARSER ./glr-regr8 +stderr: +762. glr-regression.at:1860: ok +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xff8dc46f->Object::Object { } +0xff8dc518->Object::Object { 0xff8dc46f } +0xff8dc46f->Object::~Object { 0xff8dc46f, 0xff8dc518 } +Next token is token 'a' (0xff8dc518 'a') +0xff8dc468->Object::Object { 0xff8dc518 } +0xff8dc518->Object::~Object { 0xff8dc468, 0xff8dc518 } +Shifting token 'a' (0xff8dc468 'a') +0x578db3c4->Object::Object { 0xff8dc468 } +0xff8dc468->Object::~Object { 0x578db3c4, 0xff8dc468 } +Entering state 2 +Stack now 0 2 +0xff8dc528->Object::Object { 0x578db3c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x578db3c4 'a') +-> $$ = nterm item (0xff8dc528 'a') +0x578db3c4->Object::~Object { 0x578db3c4, 0xff8dc528 } +0x578db3c4->Object::Object { 0xff8dc528 } +0xff8dc528->Object::~Object { 0x578db3c4, 0xff8dc528 } +Entering state 11 +Stack now 0 11 +Reading a token +0xff8dc46f->Object::Object { 0x578db3c4 } +0xff8dc518->Object::Object { 0x578db3c4, 0xff8dc46f } +0xff8dc46f->Object::~Object { 0x578db3c4, 0xff8dc46f, 0xff8dc518 } +Next token is token 'a' (0xff8dc518 'a') +0xff8dc468->Object::Object { 0x578db3c4, 0xff8dc518 } +0xff8dc518->Object::~Object { 0x578db3c4, 0xff8dc468, 0xff8dc518 } +Shifting token 'a' (0xff8dc468 'a') +0x578db3d4->Object::Object { 0x578db3c4, 0xff8dc468 } +0xff8dc468->Object::~Object { 0x578db3c4, 0x578db3d4, 0xff8dc468 } +Entering state 2 +Stack now 0 11 2 +0xff8dc528->Object::Object { 0x578db3c4, 0x578db3d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x578db3d4 'a') +-> $$ = nterm item (0xff8dc528 'a') +0x578db3d4->Object::~Object { 0x578db3c4, 0x578db3d4, 0xff8dc528 } +0x578db3d4->Object::Object { 0x578db3c4, 0xff8dc528 } +0xff8dc528->Object::~Object { 0x578db3c4, 0x578db3d4, 0xff8dc528 } +Entering state 11 +Stack now 0 11 11 +Reading a token +0xff8dc46f->Object::Object { 0x578db3c4, 0x578db3d4 } +0xff8dc518->Object::Object { 0x578db3c4, 0x578db3d4, 0xff8dc46f } +0xff8dc46f->Object::~Object { 0x578db3c4, 0x578db3d4, 0xff8dc46f, 0xff8dc518 } +Next token is token 'a' (0xff8dc518 'a') +0xff8dc468->Object::Object { 0x578db3c4, 0x578db3d4, 0xff8dc518 } +0xff8dc518->Object::~Object { 0x578db3c4, 0x578db3d4, 0xff8dc468, 0xff8dc518 } +Shifting token 'a' (0xff8dc468 'a') +0x578db3e4->Object::Object { 0x578db3c4, 0x578db3d4, 0xff8dc468 } +0xff8dc468->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0xff8dc468 } +Entering state 2 +Stack now 0 11 11 2 +0xff8dc528->Object::Object { 0x578db3c4, 0x578db3d4, 0x578db3e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x578db3e4 'a') +-> $$ = nterm item (0xff8dc528 'a') +0x578db3e4->Object::~Object { 0x578db3c4, 0x578db3d4, 
0x578db3e4, 0xff8dc528 } +0x578db3e4->Object::Object { 0x578db3c4, 0x578db3d4, 0xff8dc528 } +0xff8dc528->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0xff8dc528 } +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0xff8dc46f->Object::Object { 0x578db3c4, 0x578db3d4, 0x578db3e4 } +0xff8dc518->Object::Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0xff8dc46f } +0xff8dc46f->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0xff8dc46f, 0xff8dc518 } +Next token is token 'a' (0xff8dc518 'a') +0xff8dc468->Object::Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0xff8dc518 } +0xff8dc518->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0xff8dc468, 0xff8dc518 } +Shifting token 'a' (0xff8dc468 'a') +0x578db3f4->Object::Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0xff8dc468 } +0xff8dc468->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0x578db3f4, 0xff8dc468 } +Entering state 2 +Stack now 0 11 11 11 2 +0xff8dc528->Object::Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0x578db3f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x578db3f4 'a') +-> $$ = nterm item (0xff8dc528 'a') +0x578db3f4->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0x578db3f4, 0xff8dc528 } +0x578db3f4->Object::Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0xff8dc528 } +0xff8dc528->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0x578db3f4, 0xff8dc528 } +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0xff8dc46f->Object::Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0x578db3f4 } +0xff8dc518->Object::Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0x578db3f4, 0xff8dc46f } +0xff8dc46f->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0x578db3f4, 0xff8dc46f, 0xff8dc518 } +Next token is token 'p' (0xff8dc518 'p'Exception caught: cleaning lookahead and stack +0x578db3f4->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0x578db3f4, 0xff8dc518 } +0x578db3e4->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0xff8dc518 } +0x578db3d4->Object::~Object { 0x578db3c4, 0x578db3d4, 0xff8dc518 } +0x578db3c4->Object::~Object { 0x578db3c4, 0xff8dc518 } +0xff8dc518->Object::~Object { 0xff8dc518 } +exception caught: printer +end { } +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -770. glr-regression.at:2037: testing Missed %merge type warnings when LHS type is declared later: glr2.cc ... -./glr-regression.at:2037: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y -770. 
glr-regression.at:2037: ok +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xff8dc46f->Object::Object { } +0xff8dc518->Object::Object { 0xff8dc46f } +0xff8dc46f->Object::~Object { 0xff8dc46f, 0xff8dc518 } +Next token is token 'a' (0xff8dc518 'a') +0xff8dc468->Object::Object { 0xff8dc518 } +0xff8dc518->Object::~Object { 0xff8dc468, 0xff8dc518 } +Shifting token 'a' (0xff8dc468 'a') +0x578db3c4->Object::Object { 0xff8dc468 } +0xff8dc468->Object::~Object { 0x578db3c4, 0xff8dc468 } +Entering state 2 +Stack now 0 2 +0xff8dc528->Object::Object { 0x578db3c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x578db3c4 'a') +-> $$ = nterm item (0xff8dc528 'a') +0x578db3c4->Object::~Object { 0x578db3c4, 0xff8dc528 } +0x578db3c4->Object::Object { 0xff8dc528 } +0xff8dc528->Object::~Object { 0x578db3c4, 0xff8dc528 } +Entering state 11 +Stack now 0 11 +Reading a token +0xff8dc46f->Object::Object { 0x578db3c4 } +0xff8dc518->Object::Object { 0x578db3c4, 0xff8dc46f } +0xff8dc46f->Object::~Object { 0x578db3c4, 0xff8dc46f, 0xff8dc518 } +Next token is token 'a' (0xff8dc518 'a') +0xff8dc468->Object::Object { 0x578db3c4, 0xff8dc518 } +0xff8dc518->Object::~Object { 0x578db3c4, 0xff8dc468, 0xff8dc518 } +Shifting token 'a' (0xff8dc468 'a') +0x578db3d4->Object::Object { 0x578db3c4, 0xff8dc468 } +0xff8dc468->Object::~Object { 0x578db3c4, 0x578db3d4, 0xff8dc468 } +Entering state 2 +Stack now 0 11 2 +0xff8dc528->Object::Object { 0x578db3c4, 0x578db3d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x578db3d4 'a') +-> $$ = nterm item (0xff8dc528 'a') +0x578db3d4->Object::~Object { 0x578db3c4, 0x578db3d4, 0xff8dc528 } +0x578db3d4->Object::Object { 0x578db3c4, 0xff8dc528 } +0xff8dc528->Object::~Object { 0x578db3c4, 0x578db3d4, 0xff8dc528 } +Entering state 11 +Stack now 0 11 11 +Reading a token +0xff8dc46f->Object::Object { 0x578db3c4, 0x578db3d4 } +0xff8dc518->Object::Object { 0x578db3c4, 0x578db3d4, 0xff8dc46f } +0xff8dc46f->Object::~Object { 0x578db3c4, 0x578db3d4, 0xff8dc46f, 0xff8dc518 } +Next token is token 'a' (0xff8dc518 'a') +0xff8dc468->Object::Object { 0x578db3c4, 0x578db3d4, 0xff8dc518 } +0xff8dc518->Object::~Object { 0x578db3c4, 0x578db3d4, 0xff8dc468, 0xff8dc518 } +Shifting token 'a' (0xff8dc468 'a') +0x578db3e4->Object::Object { 0x578db3c4, 0x578db3d4, 0xff8dc468 } +0xff8dc468->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0xff8dc468 } +Entering state 2 +Stack now 0 11 11 2 +0xff8dc528->Object::Object { 0x578db3c4, 0x578db3d4, 0x578db3e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x578db3e4 'a') +-> $$ = nterm item (0xff8dc528 'a') +0x578db3e4->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0xff8dc528 } +0x578db3e4->Object::Object { 0x578db3c4, 0x578db3d4, 0xff8dc528 } +0xff8dc528->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0xff8dc528 } +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0xff8dc46f->Object::Object { 0x578db3c4, 0x578db3d4, 0x578db3e4 } +0xff8dc518->Object::Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0xff8dc46f } +0xff8dc46f->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0xff8dc46f, 0xff8dc518 } +Next token is token 'a' (0xff8dc518 'a') +0xff8dc468->Object::Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0xff8dc518 } +0xff8dc518->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0xff8dc468, 0xff8dc518 } +Shifting token 'a' (0xff8dc468 'a') +0x578db3f4->Object::Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0xff8dc468 } +0xff8dc468->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 
0x578db3f4, 0xff8dc468 } +Entering state 2 +Stack now 0 11 11 11 2 +0xff8dc528->Object::Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0x578db3f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x578db3f4 'a') +-> $$ = nterm item (0xff8dc528 'a') +0x578db3f4->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0x578db3f4, 0xff8dc528 } +0x578db3f4->Object::Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0xff8dc528 } +0xff8dc528->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0x578db3f4, 0xff8dc528 } +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0xff8dc46f->Object::Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0x578db3f4 } +0xff8dc518->Object::Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0x578db3f4, 0xff8dc46f } +0xff8dc46f->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0x578db3f4, 0xff8dc46f, 0xff8dc518 } +Next token is token 'p' (0xff8dc518 'p'Exception caught: cleaning lookahead and stack +0x578db3f4->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0x578db3f4, 0xff8dc518 } +0x578db3e4->Object::~Object { 0x578db3c4, 0x578db3d4, 0x578db3e4, 0xff8dc518 } +0x578db3d4->Object::~Object { 0x578db3c4, 0x578db3d4, 0xff8dc518 } +0x578db3c4->Object::~Object { 0x578db3c4, 0xff8dc518 } +0xff8dc518->Object::~Object { 0xff8dc518 } +exception caught: printer +end { } +./c++.at:1362: grep '^exception caught: printer$' stderr + +stdout: +exception caught: printer +./glr-regression.at:946: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaae +740. glr-regression.at:946: ok +stderr: +exception caught: syntax error +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaT +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaR +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS 771. glr-regression.at:2149: testing Ambiguity reports: glr.c ... ./glr-regression.at:2149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +770. glr-regression.at:2037: testing Missed %merge type warnings when LHS type is declared later: glr2.cc ... +./glr-regression.at:2037: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y +770. glr-regression.at:2037: ok 772. glr-regression.at:2150: testing Ambiguity reports: glr.cc ... ./glr-regression.at:2150: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y + ./glr-regression.at:2149: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./glr-regression.at:2150: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./glr-regression.at:1861: $PREPARSER ./glr-regr16 +./glr-regression.at:1446: $PREPARSER ./glr-regr13 stderr: -syntax error -./glr-regression.at:1861: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -763. 
glr-regression.at:1861: ok +./glr-regression.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +754. glr-regression.at:1446: stderr: + ok +stdout: +./glr-regression.at:1679: $PREPARSER ./glr-regr14 +./glr-regression.at:2150: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +./glr-regression.at:1679: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +757. glr-regression.at:1679: ok 773. glr-regression.at:2151: testing Ambiguity reports: glr2.cc ... ./glr-regression.at:2151: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y ./glr-regression.at:2151: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +774. glr-regression.at:2229: testing Predicates: glr.c ... +./glr-regression.at:2229: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +775. glr-regression.at:2230: testing Predicates: glr.cc ... +./glr-regression.at:2230: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./glr-regression.at:1965: $PREPARSER ./glr-regr17 +./glr-regression.at:1038: $PREPARSER ./glr-regr9 +stderr: +memory exhausted +./glr-regression.at:1038: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2229: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +743. glr-regression.at:1038: ok + +./glr-regression.at:2230: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +776. glr-regression.at:2231: testing Predicates: glr2.cc ... +./glr-regression.at:2231: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +stdout: +./glr-regression.at:1786: $PREPARSER ./glr-regr15 +stderr: +Ambiguity detected. +Option 1, + ambiguity -> + ambiguity1 -> + +Option 2, + ambiguity -> + ambiguity2 -> + +syntax is ambiguous +./glr-regression.at:1786: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:858: $PREPARSER ./input +stderr: +760. glr-regression.at:1786: ok +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:2231: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./glr-regression.at:1104: $PREPARSER ./glr-regr10 +stderr: +./glr-regression.at:1104: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +746. glr-regression.at:1104: ok +stderr: +stdout: +./glr-regression.at:1964: $PREPARSER ./glr-regr17 stderr: Ambiguity detected. Option 1, @@ -271208,39 +270576,21 @@ empty2 -> 1.1-2.2: syntax is ambiguous -./glr-regression.at:1965: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -766. glr-regression.at:1965: ok - +./glr-regression.at:1964: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./glr-regression.at:1680: $PREPARSER ./glr-regr14 +765. glr-regression.at:1964: ok +./glr-regression.at:1176: $PREPARSER ./glr-regr11 stderr: -./glr-regression.at:1680: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -758. glr-regression.at:1680: ok - -774. glr-regression.at:2229: testing Predicates: glr.c ... 
-./glr-regression.at:2229: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./glr-regression.at:2229: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -775. glr-regression.at:2230: testing Predicates: glr.cc ... -./glr-regression.at:2230: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./glr-regression.at:2230: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:1176: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +749. glr-regression.at:1176: ok stderr: stdout: -./glr-regression.at:1787: $PREPARSER ./glr-regr15 +./glr-regression.at:1861: $PREPARSER ./glr-regr16 stderr: -Ambiguity detected. -Option 1, - ambiguity -> - ambiguity1 -> - -Option 2, - ambiguity -> - ambiguity2 -> - -syntax is ambiguous -./glr-regression.at:1787: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -761. glr-regression.at:1787: ok - +syntax error +./glr-regression.at:1861: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +763. glr-regression.at:1861: ok stderr: stdout: ./glr-regression.at:2149: $PREPARSER ./input --debug @@ -271306,12 +270656,32 @@ Cleanup: popping token 'a' () ./glr-regression.at:2149: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 771. glr-regression.at:2149: ok -776. glr-regression.at:2231: testing Predicates: glr2.cc ... -./glr-regression.at:2231: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./glr-regression.at:2231: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: +./glr-regression.at:2229: $PREPARSER ./input Nwin +stderr: +./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2229: $PREPARSER ./input Owin +stderr: +syntax error, unexpected 'n', expecting 'o' +./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2229: $PREPARSER ./input Owio +stderr: +./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2229: $PREPARSER ./input Nwio +stderr: +syntax error, unexpected 'o', expecting 'n' +./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +774. glr-regression.at:2229: ok +stderr: +stdout: +./glr-regression.at:1312: $PREPARSER ./glr-regr12 +stderr: +stderr: +./glr-regression.at:1312: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: ./glr-regression.at:2150: $PREPARSER ./input --debug +752. glr-regression.at:1312: ok stderr: Starting parse Entering state 0 @@ -271376,28 +270746,11 @@ 772. 
glr-regression.at:2150: ok stderr: stdout: -./glr-regression.at:2229: $PREPARSER ./input Nwin -stderr: -./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2229: $PREPARSER ./input Owin -stderr: -syntax error, unexpected 'n', expecting 'o' -./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2229: $PREPARSER ./input Owio -stderr: -./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2229: $PREPARSER ./input Nwio -stderr: -syntax error, unexpected 'o', expecting 'n' -./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -774. glr-regression.at:2229: ok -stderr: -stdout: -./glr-regression.at:1862: $PREPARSER ./glr-regr16 +./c++.at:858: $PREPARSER ./input stderr: -syntax error -./glr-regression.at:1862: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -764. glr-regression.at:1862: ok +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./glr-regression.at:2230: $PREPARSER ./input Nwin @@ -271412,9 +270765,314 @@ ./glr-regression.at:2230: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./glr-regression.at:2230: $PREPARSER ./input Nwio stderr: +stdout: +stderr: +./glr-regression.at:1965: $PREPARSER ./glr-regr17 syntax error, unexpected 'o', expecting 'n' ./glr-regression.at:2230: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Ambiguity detected. +Option 1, + start -> + ambig1 -> + sub_ambig2 -> + empty2 -> + 'a' + 'b' + empty1 -> + +Option 2, + start -> + ambig2 -> + sub_ambig2 -> + empty2 -> + 'a' + 'b' + empty2 -> + +1.1-2.2: syntax is ambiguous +./glr-regression.at:1965: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 775. glr-regression.at:2230: ok +766. glr-regression.at:1965: ok +stderr: +stdout: +./glr-regression.at:1447: $PREPARSER ./glr-regr13 +stderr: +./glr-regression.at:1447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +755. glr-regression.at:1447: ok +stderr: +stdout: +./c++.at:858: $PREPARSER ./input +stderr: +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./glr-regression.at:1680: $PREPARSER ./glr-regr14 +stderr: +./glr-regression.at:1680: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +758. glr-regression.at:1680: ok +stderr: +stdout: +./glr-regression.at:1787: $PREPARSER ./glr-regr15 +stderr: +Ambiguity detected. +Option 1, + ambiguity -> + ambiguity1 -> + +Option 2, + ambiguity -> + ambiguity2 -> + +syntax is ambiguous +./glr-regression.at:1787: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +761. 
glr-regression.at:1787: ok +stderr: +stdout: +./c++.at:1362: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaap +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input --debug aaaap +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xff8cc8ff->Object::Object { } +0xff8cc9a8->Object::Object { 0xff8cc8ff } +0xff8cc8ff->Object::~Object { 0xff8cc8ff, 0xff8cc9a8 } +Next token is token 'a' (0xff8cc9a8 'a') +0xff8cc8f8->Object::Object { 0xff8cc9a8 } +0xff8cc9a8->Object::~Object { 0xff8cc8f8, 0xff8cc9a8 } +Shifting token 'a' (0xff8cc8f8 'a') +0x57f793c4->Object::Object { 0xff8cc8f8 } +0xff8cc8f8->Object::~Object { 0x57f793c4, 0xff8cc8f8 } +Entering state 2 +Stack now 0 2 +0xff8cc9b8->Object::Object { 0x57f793c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57f793c4 'a') +-> $$ = nterm item (0xff8cc9b8 'a') +0x57f793c4->Object::~Object { 0x57f793c4, 0xff8cc9b8 } +0x57f793c4->Object::Object { 0xff8cc9b8 } +0xff8cc9b8->Object::~Object { 0x57f793c4, 0xff8cc9b8 } +Entering state 11 +Stack now 0 11 +Reading a token +0xff8cc8ff->Object::Object { 0x57f793c4 } +0xff8cc9a8->Object::Object { 0x57f793c4, 0xff8cc8ff } +0xff8cc8ff->Object::~Object { 0x57f793c4, 0xff8cc8ff, 0xff8cc9a8 } +Next token is token 'a' (0xff8cc9a8 'a') +0xff8cc8f8->Object::Object { 0x57f793c4, 0xff8cc9a8 } +0xff8cc9a8->Object::~Object { 0x57f793c4, 0xff8cc8f8, 0xff8cc9a8 } +Shifting token 'a' (0xff8cc8f8 'a') +0x57f793d4->Object::Object { 0x57f793c4, 0xff8cc8f8 } +0xff8cc8f8->Object::~Object { 0x57f793c4, 0x57f793d4, 0xff8cc8f8 } +Entering state 2 +Stack now 0 11 2 +0xff8cc9b8->Object::Object { 0x57f793c4, 0x57f793d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57f793d4 'a') +-> $$ = nterm item (0xff8cc9b8 'a') +0x57f793d4->Object::~Object { 0x57f793c4, 0x57f793d4, 0xff8cc9b8 } +0x57f793d4->Object::Object { 0x57f793c4, 0xff8cc9b8 } +0xff8cc9b8->Object::~Object { 0x57f793c4, 0x57f793d4, 0xff8cc9b8 } +Entering state 11 +Stack now 0 11 11 +Reading a token +0xff8cc8ff->Object::Object { 0x57f793c4, 0x57f793d4 } +0xff8cc9a8->Object::Object { 0x57f793c4, 0x57f793d4, 0xff8cc8ff } +0xff8cc8ff->Object::~Object { 0x57f793c4, 0x57f793d4, 0xff8cc8ff, 0xff8cc9a8 } +Next token is token 'a' (0xff8cc9a8 'a') +0xff8cc8f8->Object::Object { 0x57f793c4, 0x57f793d4, 0xff8cc9a8 } +0xff8cc9a8->Object::~Object { 0x57f793c4, 0x57f793d4, 0xff8cc8f8, 0xff8cc9a8 } +Shifting token 'a' (0xff8cc8f8 'a') +0x57f793e4->Object::Object { 0x57f793c4, 0x57f793d4, 0xff8cc8f8 } +0xff8cc8f8->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc8f8 } +Entering state 2 +Stack now 0 11 11 2 +0xff8cc9b8->Object::Object { 0x57f793c4, 0x57f793d4, 0x57f793e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57f793e4 'a') +-> $$ = nterm item (0xff8cc9b8 'a') +0x57f793e4->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc9b8 } +0x57f793e4->Object::Object { 0x57f793c4, 0x57f793d4, 0xff8cc9b8 } +0xff8cc9b8->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc9b8 } +Entering state 11 
+Stack now 0 11 11 11 +Reading a token +0xff8cc8ff->Object::Object { 0x57f793c4, 0x57f793d4, 0x57f793e4 } +0xff8cc9a8->Object::Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc8ff } +0xff8cc8ff->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc8ff, 0xff8cc9a8 } +Next token is token 'a' (0xff8cc9a8 'a') +0xff8cc8f8->Object::Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc9a8 } +0xff8cc9a8->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc8f8, 0xff8cc9a8 } +Shifting token 'a' (0xff8cc8f8 'a') +0x57f793f4->Object::Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc8f8 } +0xff8cc8f8->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0x57f793f4, 0xff8cc8f8 } +Entering state 2 +Stack now 0 11 11 11 2 +0xff8cc9b8->Object::Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0x57f793f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57f793f4 'a') +-> $$ = nterm item (0xff8cc9b8 'a') +0x57f793f4->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0x57f793f4, 0xff8cc9b8 } +0x57f793f4->Object::Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc9b8 } +0xff8cc9b8->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0x57f793f4, 0xff8cc9b8 } +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0xff8cc8ff->Object::Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0x57f793f4 } +0xff8cc9a8->Object::Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0x57f793f4, 0xff8cc8ff } +0xff8cc8ff->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0x57f793f4, 0xff8cc8ff, 0xff8cc9a8 } +Next token is token 'p' (0xff8cc9a8 'p'Exception caught: cleaning lookahead and stack +0x57f793f4->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0x57f793f4, 0xff8cc9a8 } +0x57f793e4->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc9a8 } +0x57f793d4->Object::~Object { 0x57f793c4, 0x57f793d4, 0xff8cc9a8 } +0x57f793c4->Object::~Object { 0x57f793c4, 0xff8cc9a8 } +0xff8cc9a8->Object::~Object { 0xff8cc9a8 } +exception caught: printer +end { } +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xff8cc8ff->Object::Object { } +0xff8cc9a8->Object::Object { 0xff8cc8ff } +0xff8cc8ff->Object::~Object { 0xff8cc8ff, 0xff8cc9a8 } +Next token is token 'a' (0xff8cc9a8 'a') +0xff8cc8f8->Object::Object { 0xff8cc9a8 } +0xff8cc9a8->Object::~Object { 0xff8cc8f8, 0xff8cc9a8 } +Shifting token 'a' (0xff8cc8f8 'a') +0x57f793c4->Object::Object { 0xff8cc8f8 } +0xff8cc8f8->Object::~Object { 0x57f793c4, 0xff8cc8f8 } +Entering state 2 +Stack now 0 2 +0xff8cc9b8->Object::Object { 0x57f793c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57f793c4 'a') +-> $$ = nterm item (0xff8cc9b8 'a') +0x57f793c4->Object::~Object { 0x57f793c4, 0xff8cc9b8 } +0x57f793c4->Object::Object { 0xff8cc9b8 } +0xff8cc9b8->Object::~Object { 0x57f793c4, 0xff8cc9b8 } +Entering state 11 +Stack now 0 11 +Reading a token +0xff8cc8ff->Object::Object { 0x57f793c4 } +0xff8cc9a8->Object::Object { 0x57f793c4, 0xff8cc8ff } +0xff8cc8ff->Object::~Object { 0x57f793c4, 0xff8cc8ff, 0xff8cc9a8 } +Next token is token 'a' (0xff8cc9a8 'a') +0xff8cc8f8->Object::Object { 0x57f793c4, 0xff8cc9a8 } +0xff8cc9a8->Object::~Object { 0x57f793c4, 0xff8cc8f8, 0xff8cc9a8 } +Shifting token 'a' (0xff8cc8f8 'a') +0x57f793d4->Object::Object { 0x57f793c4, 0xff8cc8f8 } +0xff8cc8f8->Object::~Object { 0x57f793c4, 0x57f793d4, 0xff8cc8f8 } +Entering state 2 +Stack now 0 11 2 +0xff8cc9b8->Object::Object { 0x57f793c4, 0x57f793d4 } +Reducing stack by rule 4 
(line 142): + $1 = token 'a' (0x57f793d4 'a') +-> $$ = nterm item (0xff8cc9b8 'a') +0x57f793d4->Object::~Object { 0x57f793c4, 0x57f793d4, 0xff8cc9b8 } +0x57f793d4->Object::Object { 0x57f793c4, 0xff8cc9b8 } +0xff8cc9b8->Object::~Object { 0x57f793c4, 0x57f793d4, 0xff8cc9b8 } +Entering state 11 +Stack now 0 11 11 +Reading a token +0xff8cc8ff->Object::Object { 0x57f793c4, 0x57f793d4 } +0xff8cc9a8->Object::Object { 0x57f793c4, 0x57f793d4, 0xff8cc8ff } +0xff8cc8ff->Object::~Object { 0x57f793c4, 0x57f793d4, 0xff8cc8ff, 0xff8cc9a8 } +Next token is token 'a' (0xff8cc9a8 'a') +0xff8cc8f8->Object::Object { 0x57f793c4, 0x57f793d4, 0xff8cc9a8 } +0xff8cc9a8->Object::~Object { 0x57f793c4, 0x57f793d4, 0xff8cc8f8, 0xff8cc9a8 } +Shifting token 'a' (0xff8cc8f8 'a') +0x57f793e4->Object::Object { 0x57f793c4, 0x57f793d4, 0xff8cc8f8 } +0xff8cc8f8->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc8f8 } +Entering state 2 +Stack now 0 11 11 2 +0xff8cc9b8->Object::Object { 0x57f793c4, 0x57f793d4, 0x57f793e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57f793e4 'a') +-> $$ = nterm item (0xff8cc9b8 'a') +0x57f793e4->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc9b8 } +0x57f793e4->Object::Object { 0x57f793c4, 0x57f793d4, 0xff8cc9b8 } +0xff8cc9b8->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc9b8 } +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0xff8cc8ff->Object::Object { 0x57f793c4, 0x57f793d4, 0x57f793e4 } +0xff8cc9a8->Object::Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc8ff } +0xff8cc8ff->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc8ff, 0xff8cc9a8 } +Next token is token 'a' (0xff8cc9a8 'a') +0xff8cc8f8->Object::Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc9a8 } +0xff8cc9a8->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc8f8, 0xff8cc9a8 } +Shifting token 'a' (0xff8cc8f8 'a') +0x57f793f4->Object::Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc8f8 } +0xff8cc8f8->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0x57f793f4, 0xff8cc8f8 } +Entering state 2 +Stack now 0 11 11 11 2 +0xff8cc9b8->Object::Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0x57f793f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57f793f4 'a') +-> $$ = nterm item (0xff8cc9b8 'a') +0x57f793f4->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0x57f793f4, 0xff8cc9b8 } +0x57f793f4->Object::Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc9b8 } +0xff8cc9b8->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0x57f793f4, 0xff8cc9b8 } +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0xff8cc8ff->Object::Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0x57f793f4 } +0xff8cc9a8->Object::Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0x57f793f4, 0xff8cc8ff } +0xff8cc8ff->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0x57f793f4, 0xff8cc8ff, 0xff8cc9a8 } +Next token is token 'p' (0xff8cc9a8 'p'Exception caught: cleaning lookahead and stack +0x57f793f4->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0x57f793f4, 0xff8cc9a8 } +0x57f793e4->Object::~Object { 0x57f793c4, 0x57f793d4, 0x57f793e4, 0xff8cc9a8 } +0x57f793d4->Object::~Object { 0x57f793c4, 0x57f793d4, 0xff8cc9a8 } +0x57f793c4->Object::~Object { 0x57f793c4, 0xff8cc9a8 } +0xff8cc9a8->Object::~Object { 0xff8cc9a8 } +exception caught: printer +end { } +./c++.at:1362: grep '^exception caught: printer$' stderr +stdout: +exception caught: printer +./c++.at:1362: $PREPARSER ./input aaaae +stderr: +exception caught: syntax error +./c++.at:1362: sed >&2 
-e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./glr-regression.at:1862: $PREPARSER ./glr-regr16 +./c++.at:1362: $PREPARSER ./input aaaaE +stderr: +syntax error +stderr: +./glr-regression.at:1862: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaT +764. glr-regression.at:1862: ok +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaR +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +687. c++.at:1362: ok stderr: stdout: ./glr-regression.at:1966: $PREPARSER ./glr-regr17 @@ -271443,8 +271101,19 @@ 767. glr-regression.at:1966: ok stderr: stdout: +./c++.at:1363: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaal +stderr: +stderr: +stdout: +exception caught: yylex ./glr-regression.at:2151: $PREPARSER ./input --debug +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./c++.at:1363: $PREPARSER ./input i Starting parse Entering state 0 Reading a token @@ -271505,7 +271174,240 @@ Cleanup: popping nterm b () Cleanup: popping token 'a' () ./glr-regression.at:2151: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +exception caught: initial-action +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 773. glr-regression.at:2151: ok +./c++.at:1363: $PREPARSER ./input aaaap +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input --debug aaaap +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xffe60dff->Object::Object { } +0xffe60ea8->Object::Object { 0xffe60dff } +0xffe60dff->Object::~Object { 0xffe60dff, 0xffe60ea8 } +Next token is token 'a' (0xffe60ea8 'a') +0xffe60df8->Object::Object { 0xffe60ea8 } +0xffe60ea8->Object::~Object { 0xffe60df8, 0xffe60ea8 } +Shifting token 'a' (0xffe60df8 'a') +0x57dd53c4->Object::Object { 0xffe60df8 } +0xffe60df8->Object::~Object { 0x57dd53c4, 0xffe60df8 } +Entering state 1 +Stack now 0 1 +0xffe60eb8->Object::Object { 0x57dd53c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57dd53c4 'a') +-> $$ = nterm item (0xffe60eb8 'a') +0x57dd53c4->Object::~Object { 0x57dd53c4, 0xffe60eb8 } +0x57dd53c4->Object::Object { 0xffe60eb8 } +0xffe60eb8->Object::~Object { 0x57dd53c4, 0xffe60eb8 } +Entering state 10 +Stack now 0 10 +Reading a token +0xffe60dff->Object::Object { 0x57dd53c4 } +0xffe60ea8->Object::Object { 0x57dd53c4, 0xffe60dff } +0xffe60dff->Object::~Object { 0x57dd53c4, 0xffe60dff, 0xffe60ea8 } +Next token is token 'a' (0xffe60ea8 'a') +0xffe60df8->Object::Object { 0x57dd53c4, 0xffe60ea8 } +0xffe60ea8->Object::~Object { 0x57dd53c4, 0xffe60df8, 0xffe60ea8 } +Shifting token 'a' (0xffe60df8 'a') +0x57dd53d4->Object::Object { 0x57dd53c4, 0xffe60df8 } +0xffe60df8->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0xffe60df8 } +Entering state 1 +Stack now 0 10 1 +0xffe60eb8->Object::Object { 0x57dd53c4, 0x57dd53d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57dd53d4 'a') +-> $$ = nterm item (0xffe60eb8 'a') +0x57dd53d4->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0xffe60eb8 } 
+0x57dd53d4->Object::Object { 0x57dd53c4, 0xffe60eb8 } +0xffe60eb8->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0xffe60eb8 } +Entering state 10 +Stack now 0 10 10 +Reading a token +0xffe60dff->Object::Object { 0x57dd53c4, 0x57dd53d4 } +0xffe60ea8->Object::Object { 0x57dd53c4, 0x57dd53d4, 0xffe60dff } +0xffe60dff->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0xffe60dff, 0xffe60ea8 } +Next token is token 'a' (0xffe60ea8 'a') +0xffe60df8->Object::Object { 0x57dd53c4, 0x57dd53d4, 0xffe60ea8 } +0xffe60ea8->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0xffe60df8, 0xffe60ea8 } +Shifting token 'a' (0xffe60df8 'a') +0x57dd53e4->Object::Object { 0x57dd53c4, 0x57dd53d4, 0xffe60df8 } +0xffe60df8->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60df8 } +Entering state 1 +Stack now 0 10 10 1 +0xffe60eb8->Object::Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57dd53e4 'a') +-> $$ = nterm item (0xffe60eb8 'a') +0x57dd53e4->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60eb8 } +0x57dd53e4->Object::Object { 0x57dd53c4, 0x57dd53d4, 0xffe60eb8 } +0xffe60eb8->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60eb8 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0xffe60dff->Object::Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4 } +0xffe60ea8->Object::Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60dff } +0xffe60dff->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60dff, 0xffe60ea8 } +Next token is token 'a' (0xffe60ea8 'a') +0xffe60df8->Object::Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60ea8 } +0xffe60ea8->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60df8, 0xffe60ea8 } +Shifting token 'a' (0xffe60df8 'a') +0x57dd53f4->Object::Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60df8 } +0xffe60df8->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0x57dd53f4, 0xffe60df8 } +Entering state 1 +Stack now 0 10 10 10 1 +0xffe60eb8->Object::Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0x57dd53f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57dd53f4 'a') +-> $$ = nterm item (0xffe60eb8 'a') +0x57dd53f4->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0x57dd53f4, 0xffe60eb8 } +0x57dd53f4->Object::Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60eb8 } +0xffe60eb8->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0x57dd53f4, 0xffe60eb8 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0xffe60dff->Object::Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0x57dd53f4 } +0xffe60ea8->Object::Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0x57dd53f4, 0xffe60dff } +0xffe60dff->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0x57dd53f4, 0xffe60dff, 0xffe60ea8 } +Next token is token 'p' (0xffe60ea8 'p'Exception caught: cleaning lookahead and stack +0x57dd53f4->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0x57dd53f4, 0xffe60ea8 } +0x57dd53e4->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60ea8 } +0x57dd53d4->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0xffe60ea8 } +0x57dd53c4->Object::~Object { 0x57dd53c4, 0xffe60ea8 } +0xffe60ea8->Object::~Object { 0xffe60ea8 } +exception caught: printer +end { } +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0xffe60dff->Object::Object { } +0xffe60ea8->Object::Object { 0xffe60dff } +0xffe60dff->Object::~Object { 0xffe60dff, 0xffe60ea8 } +Next token is token 'a' (0xffe60ea8 'a') 
+0xffe60df8->Object::Object { 0xffe60ea8 } +0xffe60ea8->Object::~Object { 0xffe60df8, 0xffe60ea8 } +Shifting token 'a' (0xffe60df8 'a') +0x57dd53c4->Object::Object { 0xffe60df8 } +0xffe60df8->Object::~Object { 0x57dd53c4, 0xffe60df8 } +Entering state 1 +Stack now 0 1 +0xffe60eb8->Object::Object { 0x57dd53c4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57dd53c4 'a') +-> $$ = nterm item (0xffe60eb8 'a') +0x57dd53c4->Object::~Object { 0x57dd53c4, 0xffe60eb8 } +0x57dd53c4->Object::Object { 0xffe60eb8 } +0xffe60eb8->Object::~Object { 0x57dd53c4, 0xffe60eb8 } +Entering state 10 +Stack now 0 10 +Reading a token +0xffe60dff->Object::Object { 0x57dd53c4 } +0xffe60ea8->Object::Object { 0x57dd53c4, 0xffe60dff } +0xffe60dff->Object::~Object { 0x57dd53c4, 0xffe60dff, 0xffe60ea8 } +Next token is token 'a' (0xffe60ea8 'a') +0xffe60df8->Object::Object { 0x57dd53c4, 0xffe60ea8 } +0xffe60ea8->Object::~Object { 0x57dd53c4, 0xffe60df8, 0xffe60ea8 } +Shifting token 'a' (0xffe60df8 'a') +0x57dd53d4->Object::Object { 0x57dd53c4, 0xffe60df8 } +0xffe60df8->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0xffe60df8 } +Entering state 1 +Stack now 0 10 1 +0xffe60eb8->Object::Object { 0x57dd53c4, 0x57dd53d4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57dd53d4 'a') +-> $$ = nterm item (0xffe60eb8 'a') +0x57dd53d4->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0xffe60eb8 } +0x57dd53d4->Object::Object { 0x57dd53c4, 0xffe60eb8 } +0xffe60eb8->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0xffe60eb8 } +Entering state 10 +Stack now 0 10 10 +Reading a token +0xffe60dff->Object::Object { 0x57dd53c4, 0x57dd53d4 } +0xffe60ea8->Object::Object { 0x57dd53c4, 0x57dd53d4, 0xffe60dff } +0xffe60dff->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0xffe60dff, 0xffe60ea8 } +Next token is token 'a' (0xffe60ea8 'a') +0xffe60df8->Object::Object { 0x57dd53c4, 0x57dd53d4, 0xffe60ea8 } +0xffe60ea8->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0xffe60df8, 0xffe60ea8 } +Shifting token 'a' (0xffe60df8 'a') +0x57dd53e4->Object::Object { 0x57dd53c4, 0x57dd53d4, 0xffe60df8 } +0xffe60df8->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60df8 } +Entering state 1 +Stack now 0 10 10 1 +0xffe60eb8->Object::Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57dd53e4 'a') +-> $$ = nterm item (0xffe60eb8 'a') +0x57dd53e4->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60eb8 } +0x57dd53e4->Object::Object { 0x57dd53c4, 0x57dd53d4, 0xffe60eb8 } +0xffe60eb8->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60eb8 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0xffe60dff->Object::Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4 } +0xffe60ea8->Object::Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60dff } +0xffe60dff->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60dff, 0xffe60ea8 } +Next token is token 'a' (0xffe60ea8 'a') +0xffe60df8->Object::Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60ea8 } +0xffe60ea8->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60df8, 0xffe60ea8 } +Shifting token 'a' (0xffe60df8 'a') +0x57dd53f4->Object::Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60df8 } +0xffe60df8->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0x57dd53f4, 0xffe60df8 } +Entering state 1 +Stack now 0 10 10 10 1 +0xffe60eb8->Object::Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0x57dd53f4 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x57dd53f4 'a') +-> $$ = nterm item (0xffe60eb8 'a') 
+0x57dd53f4->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0x57dd53f4, 0xffe60eb8 } +0x57dd53f4->Object::Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60eb8 } +0xffe60eb8->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0x57dd53f4, 0xffe60eb8 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0xffe60dff->Object::Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0x57dd53f4 } +0xffe60ea8->Object::Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0x57dd53f4, 0xffe60dff } +0xffe60dff->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0x57dd53f4, 0xffe60dff, 0xffe60ea8 } +Next token is token 'p' (0xffe60ea8 'p'Exception caught: cleaning lookahead and stack +0x57dd53f4->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0x57dd53f4, 0xffe60ea8 } +0x57dd53e4->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0x57dd53e4, 0xffe60ea8 } +0x57dd53d4->Object::~Object { 0x57dd53c4, 0x57dd53d4, 0xffe60ea8 } +0x57dd53c4->Object::~Object { 0x57dd53c4, 0xffe60ea8 } +0xffe60ea8->Object::~Object { 0xffe60ea8 } +exception caught: printer +end { } +./c++.at:1363: grep '^exception caught: printer$' stderr +stdout: +exception caught: printer +./c++.at:1363: $PREPARSER ./input aaaae +stderr: +exception caught: syntax error +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaT +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaR +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +688. c++.at:1363: ok +stderr: +stdout: +./c++.at:858: $PREPARSER ./input +stderr: +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./glr-regression.at:2231: $PREPARSER ./input Nwin @@ -271523,6 +271425,140 @@ syntax error, unexpected 'o', expecting 'n' ./glr-regression.at:2231: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 776. 
glr-regression.at:2231: ok +stderr: +stdout: +./c++.at:858: $PREPARSER ./input +stderr: +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:858: $PREPARSER ./input +stderr: +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:858: $PREPARSER ./input +stderr: +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +======== Testing with C++ standard flags: '' +./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:859: $PREPARSER ./input +stderr: +./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:859: $PREPARSER ./input +stderr: +./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:859: $PREPARSER ./input +stderr: +./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:859: $PREPARSER ./input +stderr: +./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:859: $PREPARSER ./input +stderr: +./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:859: $PREPARSER ./input +stderr: +./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:859: $PREPARSER ./input +stderr: +./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:859: $PREPARSER ./input +stderr: +./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:860: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +======== Testing with C++ standard flags: '' +./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:860: $PREPARSER ./input +stderr: +./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:860: $PREPARSER ./input +stderr: +./c++.at:860: sed >&2 -e '/^profiling:.*:Merge 
mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:860: $PREPARSER ./input +stderr: +./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:860: $PREPARSER ./input +stderr: +./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:860: $PREPARSER ./input +stderr: +./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:860: $PREPARSER ./input +stderr: +./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:860: $PREPARSER ./input +stderr: +./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:860: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:860: $PREPARSER ./input +stderr: +./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +679. c++.at:854: ok ## ------------- ## ## Test results. ## @@ -271540,7 +271576,7 @@ dh_testroot dh_prep dh_auto_install - make -j8 install DESTDIR=/build/reproducible-path/bison-3.8.2\+dfsg/debian/tmp AM_UPDATE_INFO_DIR=no + make -j21 install DESTDIR=/build/reproducible-path/bison-3.8.2\+dfsg/debian/tmp AM_UPDATE_INFO_DIR=no make[1]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' make install-recursive make[2]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' @@ -271709,67 +271745,67 @@ make[4]: Entering directory '/build/reproducible-path/bison-3.8.2+dfsg' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/bin' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/lib/i386-linux-gnu' - /usr/bin/install -c -m 644 lib/liby.a '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/lib/i386-linux-gnu' - /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/aclocal' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/bin' - /usr/bin/install -c src/bison '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/bin' - /usr/bin/install -c -m 644 m4/bison-i18n.m4 '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/aclocal' - /usr/bin/install -c src/yacc '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/bin' + /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/aclocal' + /usr/bin/install -c -m 644 lib/liby.a '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/lib/i386-linux-gnu' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++' + /usr/bin/install -c src/bison '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/bin' /usr/bin/mkdir -p 
'/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/bistromathic' + /usr/bin/install -c -m 644 m4/bison-i18n.m4 '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/aclocal' + /usr/bin/install -c src/yacc '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/bin' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c' /usr/bin/install -c -m 644 examples/c++/calc++/driver.cc examples/c++/calc++/driver.hh examples/c++/calc++/scanner.ll examples/c++/calc++/calc++.cc examples/c++/calc++/parser.yy '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++' - ( cd '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/lib/i386-linux-gnu' && ranlib liby.a ) - /usr/bin/install -c -m 644 examples/c/bistromathic/parse.y examples/c/bistromathic/Makefile examples/c/bistromathic/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/bistromathic' /usr/bin/install -c -m 644 examples/c++/simple.yy '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/calc' - /usr/bin/install -c -m 644 examples/c/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/calc' - /usr/bin/install -c -m 644 examples/c/calc/calc.y examples/c/calc/Makefile examples/c/calc/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/calc' - /usr/bin/install -c -m 644 examples/d/calc/calc.y examples/d/calc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/calc' + /usr/bin/install -c -m 644 examples/c/bistromathic/parse.y examples/c/bistromathic/Makefile examples/c/bistromathic/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/bistromathic' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++' + /usr/bin/install -c -m 644 examples/c/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++' + /usr/bin/install -c -m 644 examples/c/calc/calc.y examples/c/calc/Makefile examples/c/calc/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/calc' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d' + /usr/bin/install -c -m 644 examples/d/calc/calc.y examples/d/calc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/calc' /usr/bin/install -c -m 644 examples/c++/calc++/README.md examples/c++/calc++/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++' + ( cd '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/lib/i386-linux-gnu' && ranlib liby.a ) + /usr/bin/install -c -m 644 examples/c++/README.md examples/c++/Makefile examples/c++/variant.yy examples/c++/variant-11.yy '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison' /usr/bin/mkdir -p 
'/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples' - /usr/bin/install -c -m 644 examples/c++/README.md examples/c++/Makefile examples/c++/variant.yy examples/c++/variant-11.yy '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++' /usr/bin/install -c -m 644 examples/d/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d' - /usr/bin/install -c -m 644 examples/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/glr' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java' - /usr/bin/install -c -m 644 AUTHORS COPYING NEWS README THANKS TODO '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison' - /usr/bin/install -c -m 644 examples/java/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java' - /usr/bin/install -c -m 644 examples/c/glr/c++-types.y examples/c/glr/Makefile examples/c/glr/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/glr' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/calc' + /usr/bin/install -c -m 644 AUTHORS COPYING NEWS README THANKS TODO '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison' + /usr/bin/install -c -m 644 examples/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/simple' + /usr/bin/install -c -m 644 examples/c/glr/c++-types.y examples/c/glr/Makefile examples/c/glr/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/glr' + /usr/bin/install -c -m 644 examples/java/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/lexcalc' /usr/bin/install -c -m 644 examples/java/calc/Calc.y examples/java/calc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/calc' - /usr/bin/install -c -m 644 examples/java/simple/Calc.y examples/java/simple/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/simple' - /usr/bin/install -c -m 644 examples/c/lexcalc/parse.y examples/c/lexcalc/scan.l examples/c/lexcalc/Makefile examples/c/lexcalc/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/lexcalc' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/m4sugar' + /usr/bin/install -c -m 644 examples/java/simple/Calc.y examples/java/simple/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/simple' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison' - /usr/bin/install -c -m 644 data/m4sugar/foreach.m4 data/m4sugar/m4sugar.m4 '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/m4sugar' - /usr/bin/install -c -m 644 examples/c/mfcalc/Makefile 
'/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc' - /usr/bin/install -c -m 644 data/README.md data/bison-default.css '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison' + /usr/bin/install -c -m 644 examples/c/lexcalc/parse.y examples/c/lexcalc/scan.l examples/c/lexcalc/Makefile examples/c/lexcalc/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/lexcalc' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/pushcalc' + /usr/bin/install -c -m 644 data/m4sugar/foreach.m4 data/m4sugar/m4sugar.m4 '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/m4sugar' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/reccalc' + /usr/bin/install -c -m 644 examples/c/mfcalc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc' - /usr/bin/install -c -m 644 examples/c/rpcalc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc' + /usr/bin/install -c -m 644 data/README.md data/bison-default.css '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison' /usr/bin/install -c -m 644 examples/c/pushcalc/calc.y examples/c/pushcalc/Makefile examples/c/pushcalc/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/pushcalc' - /usr/bin/install -c -m 644 examples/c/reccalc/parse.y examples/c/reccalc/scan.l examples/c/reccalc/Makefile examples/c/reccalc/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/reccalc' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/simple' + /usr/bin/install -c -m 644 examples/c/reccalc/parse.y examples/c/reccalc/scan.l examples/c/reccalc/Makefile examples/c/reccalc/README.md '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/reccalc' + /usr/bin/install -c -m 644 examples/c/rpcalc/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc' /usr/bin/install -c -m 644 examples/d/simple/calc.y examples/d/simple/Makefile '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/simple' -/usr/bin/mkdir -p doc /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/skeletons' +/usr/bin/mkdir -p doc /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/xslt' LC_ALL=C tests/bison --version >doc/bison.help.tmp + /usr/bin/install -c -m 644 data/skeletons/bison.m4 data/skeletons/c++-skel.m4 data/skeletons/c++.m4 data/skeletons/c-like.m4 data/skeletons/c-skel.m4 data/skeletons/c.m4 data/skeletons/glr.c data/skeletons/glr.cc data/skeletons/glr2.cc data/skeletons/java-skel.m4 data/skeletons/java.m4 data/skeletons/lalr1.cc data/skeletons/lalr1.java data/skeletons/location.cc data/skeletons/stack.hh data/skeletons/traceon.m4 data/skeletons/variant.hh data/skeletons/yacc.c data/skeletons/d-skel.m4 data/skeletons/d.m4 data/skeletons/lalr1.d '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/skeletons' /usr/bin/install -c -m 644 data/xslt/bison.xsl data/xslt/xml2dot.xsl data/xslt/xml2text.xsl data/xslt/xml2xhtml.xsl 
'/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/xslt' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc' /usr/bin/mkdir -p '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc' - /usr/bin/install -c -m 644 data/skeletons/bison.m4 data/skeletons/c++-skel.m4 data/skeletons/c++.m4 data/skeletons/c-like.m4 data/skeletons/c-skel.m4 data/skeletons/c.m4 data/skeletons/glr.c data/skeletons/glr.cc data/skeletons/glr2.cc data/skeletons/java-skel.m4 data/skeletons/java.m4 data/skeletons/lalr1.cc data/skeletons/lalr1.java data/skeletons/location.cc data/skeletons/stack.hh data/skeletons/traceon.m4 data/skeletons/variant.hh data/skeletons/yacc.c data/skeletons/d-skel.m4 data/skeletons/d.m4 data/skeletons/lalr1.d '/build/reproducible-path/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/skeletons' LC_ALL=C tests/bison --help | \ sed -e 's,^Usage: .*/bison \[OPTION\],Usage: bison [OPTION],g' \ -e '/translation bugs/d' >>doc/bison.help.tmp @@ -271800,117 +271836,117 @@ dh_perl dh_link dh_strip_nondeterminism + Normalized debian/bison/usr/share/locale/gl/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/gl/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/lt/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/ko/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison.mo Garbage at end of string in strptime: +02:00 at /usr/lib/i386-linux-gnu/perl/5.38/Time/Piece.pm line 598. Perhaps a format flag did not match the actual input? at /usr/lib/i386-linux-gnu/perl/5.38/Time/Piece.pm line 598. 
- Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/hu/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/hu/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/ia/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/lt/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/eu/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/ast/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/th/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison-runtime.mo
 Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison.mo
 Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/ta/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison-gnulib.mo
 Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison-gnulib.mo
 Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/sk/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/hr/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/hr/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/cs/LC_MESSAGES/bison-gnulib.mo
 Normalized debian/bison/usr/share/locale/af/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/ky/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/id/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/id/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/sk/LC_MESSAGES/bison-gnulib.mo
 Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison.mo
 Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/lv/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/ta/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/sq/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/ko/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison-runtime.mo
 Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison.mo
 Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison.mo
 Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison.mo
 Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/gl/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/id/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/id/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/ky/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/th/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/ast/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison-runtime.mo
 Normalized debian/bison/usr/share/locale/rw/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/be/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/gl/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/sl/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/sl/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison.mo
- Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison.mo
 Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison.mo
 Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison-runtime.mo
- Normalized debian/bison/usr/share/locale/cs/LC_MESSAGES/bison-gnulib.mo
- Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/eu/LC_MESSAGES/bison-gnulib.mo
 Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison.mo
 Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/hr/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/hr/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/be/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/sq/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/sl/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/sl/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/ia/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison-gnulib.mo
 Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/hu/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/hu/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison.mo
+ Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/lv/LC_MESSAGES/bison-runtime.mo
+ Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison-gnulib.mo
+ Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison.mo
 dh_compress
 dh_fixperms
 dh_missing
@@ -271932,12 +271968,14 @@
 dpkg-buildpackage: info: binary-only upload (no source included)
 dpkg-genchanges: info: including full source code in upload
 I: copying local configuration
+I: user script /srv/workspace/pbuilder/40155/tmp/hooks/B01_cleanup starting
+I: user script /srv/workspace/pbuilder/40155/tmp/hooks/B01_cleanup finished
 I: unmounting dev/ptmx filesystem
 I: unmounting dev/pts filesystem
 I: unmounting dev/shm filesystem
 I: unmounting proc filesystem
 I: unmounting sys filesystem
 I: cleaning the build env
-I: removing directory /srv/workspace/pbuilder/37675 and its subdirectories
-I: Current time: Tue Apr 2 00:28:55 -12 2024
-I: pbuilder-time-stamp: 1712060935
+I: removing directory /srv/workspace/pbuilder/40155 and its subdirectories
+I: Current time: Tue May 6 08:56:23 +14 2025
+I: pbuilder-time-stamp: 1746471383